[ 529.014055] env[62813]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62813) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 529.014488] env[62813]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62813) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 529.014488] env[62813]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62813) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 529.014806] env[62813]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs [ 529.107509] env[62813]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62813) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}} [ 529.117579] env[62813]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62813) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}} [ 529.263169] env[62813]: INFO nova.virt.driver [None req-d101dd59-07c2-40df-ad46-d411ba356555 None None] Loading compute driver 'vmwareapi.VMwareVCDriver' [ 529.340550] env[62813]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.341106] env[62813]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.341106] env[62813]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62813) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}} [ 532.596604] env[62813]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-bd57f7a6-b246-4482-8047-ebd7dec10fac {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.612758] env[62813]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62813) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}} [ 532.612984] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-3b488229-db96-4667-9587-4d1712a491eb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.644312] env[62813]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 87967. 
[ 532.644509] env[62813]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.304s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.645069] env[62813]: INFO nova.virt.vmwareapi.driver [None req-d101dd59-07c2-40df-ad46-d411ba356555 None None] VMware vCenter version: 7.0.3 [ 532.649580] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c4cde6-2c5a-4da6-bba2-4f226c3f85da {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.672082] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae06a63-f81d-45bd-93f2-d71a9ad42b2f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.678954] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72916845-b7c2-48bb-91c9-d5f35f298e12 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.685918] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e16c45-e706-4cea-bc5b-74079cc8afa5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.699198] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-547ecfcc-9efc-43e2-8f77-2d9c407a7da2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.705692] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88446a3-3a98-416b-9c97-c8422385966d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.735927] env[62813]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-a617de17-7fc5-4aa3-b2e5-68ac75aa6737 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.741587] env[62813]: DEBUG nova.virt.vmwareapi.driver [None req-d101dd59-07c2-40df-ad46-d411ba356555 None None] Extension org.openstack.compute already exists. {{(pid=62813) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}} [ 532.744298] env[62813]: INFO nova.compute.provider_config [None req-d101dd59-07c2-40df-ad46-d411ba356555 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access. 
[ 532.764211] env[62813]: DEBUG nova.context [None req-d101dd59-07c2-40df-ad46-d411ba356555 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),1a5d0527-be79-4f45-ad70-6188fce89ae3(cell1) {{(pid=62813) load_cells /opt/stack/nova/nova/context.py:464}} [ 532.766289] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.766515] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.767248] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.767603] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Acquiring lock "1a5d0527-be79-4f45-ad70-6188fce89ae3" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.767792] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Lock "1a5d0527-be79-4f45-ad70-6188fce89ae3" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.768814] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Lock "1a5d0527-be79-4f45-ad70-6188fce89ae3" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.788859] env[62813]: INFO dbcounter [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Registered counter for database nova_cell0 [ 532.797428] env[62813]: INFO dbcounter [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Registered counter for database nova_cell1 [ 532.800444] env[62813]: DEBUG oslo_db.sqlalchemy.engines [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62813) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 532.800762] env[62813]: DEBUG oslo_db.sqlalchemy.engines [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62813) _check_effective_sql_mode 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 532.805442] env[62813]: DEBUG dbcounter [-] [62813] Writer thread running {{(pid=62813) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}} [ 532.806181] env[62813]: DEBUG dbcounter [-] [62813] Writer thread running {{(pid=62813) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}} [ 532.808327] env[62813]: ERROR nova.db.main.api [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 532.808327] env[62813]: result = function(*args, **kwargs) [ 532.808327] env[62813]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 532.808327] env[62813]: return func(*args, **kwargs) [ 532.808327] env[62813]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 532.808327] env[62813]: result = fn(*args, **kwargs) [ 532.808327] env[62813]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 532.808327] env[62813]: return f(*args, **kwargs) [ 532.808327] env[62813]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version [ 532.808327] env[62813]: return db.service_get_minimum_version(context, binaries) [ 532.808327] env[62813]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 532.808327] env[62813]: _check_db_access() [ 532.808327] env[62813]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 532.808327] env[62813]: stacktrace = ''.join(traceback.format_stack()) [ 532.808327] env[62813]: [ 532.809348] env[62813]: ERROR nova.db.main.api [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 532.809348] env[62813]: result = function(*args, **kwargs) [ 532.809348] env[62813]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 532.809348] env[62813]: return func(*args, **kwargs) [ 532.809348] env[62813]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 532.809348] env[62813]: result = fn(*args, **kwargs) [ 532.809348] env[62813]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 532.809348] env[62813]: return f(*args, **kwargs) [ 532.809348] env[62813]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version [ 532.809348] env[62813]: return db.service_get_minimum_version(context, binaries) [ 532.809348] env[62813]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 532.809348] env[62813]: _check_db_access() [ 532.809348] env[62813]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 532.809348] env[62813]: stacktrace = ''.join(traceback.format_stack()) [ 532.809348] env[62813]: [ 532.810056] env[62813]: WARNING nova.objects.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000 [ 532.810056] env[62813]: WARNING nova.objects.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Failed to get minimum service version for cell 1a5d0527-be79-4f45-ad70-6188fce89ae3 [ 532.810270] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Acquiring lock "singleton_lock" {{(pid=62813) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.810434] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Acquired lock "singleton_lock" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.810683] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Releasing lock "singleton_lock" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.810998] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Full set of CONF: {{(pid=62813) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 532.811161] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ******************************************************************************** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}} [ 532.811291] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] Configuration options gathered from: {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}} [ 532.811427] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 532.811627] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}} [ 532.811752] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ================================================================================ {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}} [ 532.811968] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] allow_resize_to_same_host = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.812153] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] arq_binding_timeout = 300 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.812288] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] backdoor_port = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.812416] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] backdoor_socket = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.812583] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] block_device_allocate_retries = 60 {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.812745] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] block_device_allocate_retries_interval = 3 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.812963] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cert = self.pem {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.813095] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.813268] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute_monitors = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.813436] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] config_dir = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.813606] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] config_drive_format = iso9660 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.813743] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.813910] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] config_source = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.814093] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] console_host = devstack {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.814306] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] control_exchange = nova {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.814430] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cpu_allocation_ratio = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.814593] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] daemon = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.814765] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] debug = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.814924] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] default_access_ip_network_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.815140] 
env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] default_availability_zone = nova {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.815308] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] default_ephemeral_format = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.815471] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] default_green_pool_size = 1000 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.816325] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.816325] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] default_schedule_zone = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.816325] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] disk_allocation_ratio = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.816325] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] enable_new_services = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.816622] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] enabled_apis = ['osapi_compute'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.816622] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] enabled_ssl_apis = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.816724] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] flat_injected = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.816886] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] force_config_drive = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.817060] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] force_raw_images = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.817235] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e 
None None] graceful_shutdown_timeout = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.817401] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] heal_instance_info_cache_interval = 60 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.817618] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] host = cpu-1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.817796] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.817963] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] initial_disk_allocation_ratio = 1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.818142] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] initial_ram_allocation_ratio = 1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.818357] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.818523] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] instance_build_timeout = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.818687] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] instance_delete_interval = 300 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.818853] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] instance_format = [instance: %(uuid)s] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.819029] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] instance_name_template = instance-%08x {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.819204] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] instance_usage_audit = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.819375] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] instance_usage_audit_period = month {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.819540] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.819704] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.819870] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] internal_service_availability_zone = internal {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.820036] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] key = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.820204] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] live_migration_retry_count = 30 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.820366] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] log_config_append = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.820531] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.820718] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] log_dir = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.820842] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] log_file = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.820969] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] log_options = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.821141] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] log_rotate_interval = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.821309] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] log_rotate_interval_type = days {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.821494] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] log_rotation_type = none {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.821634] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.821761] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.821939] env[62813]: DEBUG 
oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.822127] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.822256] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.822420] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] long_rpc_timeout = 1800 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.822575] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] max_concurrent_builds = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.822734] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] max_concurrent_live_migrations = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.822890] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] max_concurrent_snapshots = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.823090] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] max_local_block_devices = 3 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.823262] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] max_logfile_count = 30 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.823422] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] max_logfile_size_mb = 200 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.823582] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] maximum_instance_delete_attempts = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.823754] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] metadata_listen = 0.0.0.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.823922] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] metadata_listen_port = 8775 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.824104] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] metadata_workers = 2 {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.824348] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] migrate_max_retries = -1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.824487] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] mkisofs_cmd = genisoimage {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.824653] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] my_block_storage_ip = 10.180.1.21 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.824787] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] my_ip = 10.180.1.21 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.824968] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] network_allocate_retries = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.825169] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.825343] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] osapi_compute_listen = 0.0.0.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.825505] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] osapi_compute_listen_port = 8774 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.825672] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] osapi_compute_unique_server_name_scope = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.825840] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] osapi_compute_workers = 2 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.826032] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] password_length = 12 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.826212] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] periodic_enable = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.826378] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] periodic_fuzzy_delay = 60 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.826548] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] pointer_model = usbtablet {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.826715] env[62813]: 
DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] preallocate_images = none {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.826876] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] publish_errors = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.827023] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] pybasedir = /opt/stack/nova {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.827182] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ram_allocation_ratio = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.827348] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] rate_limit_burst = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.827607] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] rate_limit_except_level = CRITICAL {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.827838] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] rate_limit_interval = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.828063] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] reboot_timeout = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.828255] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] reclaim_instance_interval = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.828419] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] record = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.828589] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] reimage_timeout_per_gb = 60 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.828757] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] report_interval = 120 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.828918] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] rescue_timeout = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.829088] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] reserved_host_cpus = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.829255] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] reserved_host_disk_mb = 0 {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.829412] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] reserved_host_memory_mb = 512 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.829573] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] reserved_huge_pages = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.829734] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] resize_confirm_window = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.829927] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] resize_fs_using_block_device = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.830117] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] resume_guests_state_on_host_boot = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.830292] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.830456] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] rpc_response_timeout = 60 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.830617] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] run_external_periodic_tasks = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.830845] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] running_deleted_instance_action = reap {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.830945] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] running_deleted_instance_poll_interval = 1800 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.831119] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] running_deleted_instance_timeout = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.831280] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] scheduler_instance_sync_interval = 120 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.831448] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] service_down_time = 720 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.831619] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] servicegroup_driver = db {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.831779] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] shelved_offload_time = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.831942] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] shelved_poll_interval = 3600 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.832125] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] shutdown_timeout = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.832288] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] source_is_ipv6 = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.832449] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ssl_only = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.832697] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.832865] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] sync_power_state_interval = 600 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.833067] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] sync_power_state_pool_size = 1000 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.833255] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] syslog_log_facility = LOG_USER {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.833421] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] tempdir = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.833581] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] timeout_nbd = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.833749] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] transport_url = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.833911] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] update_resources_interval = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.834084] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] use_cow_images = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.834251] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e 
None None] use_eventlog = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.834408] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] use_journal = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.834598] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] use_json = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.834723] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] use_rootwrap_daemon = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.834881] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] use_stderr = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.835076] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] use_syslog = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.835249] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vcpu_pin_set = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.835417] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plugging_is_fatal = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.835584] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plugging_timeout = 300 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.835748] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] virt_mkfs = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.835908] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] volume_usage_poll_interval = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.836109] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] watch_log_file = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.836284] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] web = /usr/share/spice-html5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 532.836470] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_concurrency.disable_process_locking = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.836754] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.836935] 
env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.837117] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.837290] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.837458] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.837622] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.837802] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.auth_strategy = keystone {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.837966] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.compute_link_prefix = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.838154] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.838327] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.dhcp_domain = novalocal {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.838497] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.enable_instance_password = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.838662] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.glance_link_prefix = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.838840] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.839020] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.839190] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] 
api.instance_list_per_project_cells = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.839360] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.list_records_by_skipping_down_cells = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.839528] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.local_metadata_per_cell = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.839697] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.max_limit = 1000 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.839865] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.metadata_cache_expiration = 15 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.840050] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.neutron_default_tenant_id = default {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.840222] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.use_neutron_default_nets = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.840391] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.840553] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.840717] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.840889] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.841072] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.vendordata_dynamic_targets = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.841244] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.vendordata_jsonfile_path = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.841425] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.841616] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e 
None None] cache.backend = dogpile.cache.memcached {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.841784] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.backend_argument = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.841954] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.config_prefix = cache.oslo {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.842138] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.dead_timeout = 60.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.842304] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.debug_cache_backend = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.842466] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.enable_retry_client = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.842627] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.enable_socket_keepalive = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.842793] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.enabled = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.842977] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.enforce_fips_mode = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.843174] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.expiration_time = 600 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.843340] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.hashclient_retry_attempts = 2 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.843522] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.hashclient_retry_delay = 1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.843691] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.memcache_dead_retry = 300 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.843856] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.memcache_password = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.844035] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62813) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.844214] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.844380] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.memcache_pool_maxsize = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.844546] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.844713] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.memcache_sasl_enabled = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.844886] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.845091] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.memcache_socket_timeout = 1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.845268] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.memcache_username = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.845438] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.proxies = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.845601] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.redis_password = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.845775] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.redis_sentinel_service_name = mymaster {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.845962] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.846166] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.redis_server = localhost:6379 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.846335] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.redis_socket_timeout = 1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.846493] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.redis_username = None {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.846684] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.retry_attempts = 2 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.846822] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.retry_delay = 0.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.846986] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.socket_keepalive_count = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.847163] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.socket_keepalive_idle = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.847326] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.socket_keepalive_interval = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.847482] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.tls_allowed_ciphers = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.847639] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.tls_cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.847796] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.tls_certfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.847957] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.tls_enabled = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.848132] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cache.tls_keyfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.848300] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.auth_section = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.848473] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.auth_type = password {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.848634] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.848808] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.catalog_info = volumev3::publicURL {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.848970] env[62813]: DEBUG oslo_service.service 
[None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.certfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.849147] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.collect_timing = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.849307] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.cross_az_attach = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.849467] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.debug = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.849625] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.endpoint_template = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.849787] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.http_retries = 3 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.849948] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.insecure = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.850119] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.keyfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.850292] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.os_region_name = RegionOne {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.850455] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.split_loggers = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.850614] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cinder.timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.850784] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.850945] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute.cpu_dedicated_set = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.851152] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute.cpu_shared_set = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.851279] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute.image_type_exclude_list = [] {{(pid=62813) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.851440] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.851601] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute.max_concurrent_disk_ops = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.851767] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute.max_disk_devices_to_attach = -1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.851931] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.852120] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.852284] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute.resource_provider_association_refresh = 300 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.852447] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.852610] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute.shutdown_retry_interval = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.852789] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.852989] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] conductor.workers = 2 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.853196] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] console.allowed_origins = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.853363] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] console.ssl_ciphers = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.853533] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] console.ssl_minimum_version = default {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.853705] env[62813]: DEBUG oslo_service.service [None 
req-ba245590-d042-4b74-9517-28cec0e8a05e None None] consoleauth.enforce_session_timeout = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.853869] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] consoleauth.token_ttl = 600 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.854046] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.854211] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.certfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.854375] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.collect_timing = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.854535] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.connect_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.854733] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.connect_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.854942] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.endpoint_override = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.855156] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.insecure = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.855324] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.keyfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.855494] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.max_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.855650] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.min_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.855810] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.region_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.855992] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.retriable_status_codes = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.856197] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.service_name = None {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.856376] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.service_type = accelerator {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.856541] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.split_loggers = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.856700] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.status_code_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.856860] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.status_code_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.857036] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.857222] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.857384] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] cyborg.version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.857568] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.backend = sqlalchemy {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.857741] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.connection = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.857909] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.connection_debug = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.858093] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.connection_parameters = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.858266] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.connection_recycle_time = 3600 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.858433] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.connection_trace = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.858598] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.db_inc_retry_interval = True {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.858761] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.db_max_retries = 20 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.858925] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.db_max_retry_interval = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.859102] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.db_retry_interval = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.859272] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.max_overflow = 50 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.859440] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.max_pool_size = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.859603] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.max_retries = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.859799] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.860013] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.mysql_wsrep_sync_wait = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.861390] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.pool_timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.861390] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.retry_interval = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.861390] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.slave_connection = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.861390] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.sqlite_synchronous = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.861390] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] database.use_db_reconnect = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.861390] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.backend = sqlalchemy {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
532.861798] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.connection = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.861798] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.connection_debug = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.861798] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.connection_parameters = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.861959] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.connection_recycle_time = 3600 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.862170] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.connection_trace = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.862355] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.db_inc_retry_interval = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.862529] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.db_max_retries = 20 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.862695] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.db_max_retry_interval = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.862861] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.db_retry_interval = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.863063] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.max_overflow = 50 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.863242] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.max_pool_size = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.863406] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.max_retries = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.863581] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.863747] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.863910] 
env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.pool_timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.864090] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.retry_interval = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.864254] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.slave_connection = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.864416] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] api_database.sqlite_synchronous = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.864592] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] devices.enabled_mdev_types = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.864772] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.864951] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ephemeral_storage_encryption.default_format = luks {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.865147] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ephemeral_storage_encryption.enabled = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.865319] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.865494] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.api_servers = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.865662] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.865825] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.certfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.866025] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.collect_timing = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.866201] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.connect_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.866362] env[62813]: DEBUG 
oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.connect_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.866527] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.debug = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.866693] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.default_trusted_certificate_ids = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.866856] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.enable_certificate_validation = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.867027] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.enable_rbd_download = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.867192] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.endpoint_override = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.867362] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.insecure = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.867524] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.keyfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.867684] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.max_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.867839] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.min_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.867999] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.num_retries = 3 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.868186] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.rbd_ceph_conf = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.868345] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.rbd_connect_timeout = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.868514] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.rbd_pool = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.868678] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.rbd_user = {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.868837] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.region_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.868995] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.retriable_status_codes = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.869176] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.service_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.869363] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.service_type = image {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.869536] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.split_loggers = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.869700] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.status_code_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.869861] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.status_code_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.870030] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.870223] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.870388] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.verify_glance_signatures = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.870550] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] glance.version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.870719] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] guestfs.debug = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.870895] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] mks.enabled = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.871271] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.871468] 
env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] image_cache.manager_interval = 2400 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.871639] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] image_cache.precache_concurrency = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.871810] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] image_cache.remove_unused_base_images = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.871982] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.872190] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.872370] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] image_cache.subdirectory_name = _base {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.872548] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.api_max_retries = 60 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.872715] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.api_retry_interval = 2 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.872878] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.auth_section = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.873081] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.auth_type = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.873255] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.873412] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.certfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.873577] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.collect_timing = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.873740] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.conductor_group = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.873899] env[62813]: DEBUG 
oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.connect_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.874068] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.connect_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.874234] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.endpoint_override = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.874430] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.insecure = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.874604] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.keyfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.874770] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.max_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.874925] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.min_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.875139] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.peer_list = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.875331] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.region_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.875509] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.retriable_status_codes = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.875676] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.serial_console_state_timeout = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.875838] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.service_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.876016] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.service_type = baremetal {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.876188] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.shard = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.876354] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.split_loggers = False {{(pid=62813) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.876515] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.status_code_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.876674] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.status_code_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.876841] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.877047] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.877220] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ironic.version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.877406] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.877580] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] key_manager.fixed_key = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.877763] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.877928] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.barbican_api_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.878101] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.barbican_endpoint = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.878280] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.barbican_endpoint_type = public {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.878442] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.barbican_region_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.878600] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.878761] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.certfile = None {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.878926] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.collect_timing = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.879101] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.insecure = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.879266] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.keyfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.879430] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.number_of_retries = 60 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.879593] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.retry_delay = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.879755] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.send_service_user_token = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.879917] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.split_loggers = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.880089] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.880258] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.verify_ssl = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882169] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican.verify_ssl_path = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882169] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican_service_user.auth_section = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882169] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican_service_user.auth_type = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882169] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican_service_user.cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882169] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican_service_user.certfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
532.882169] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican_service_user.collect_timing = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882390] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican_service_user.insecure = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882390] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican_service_user.keyfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882390] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican_service_user.split_loggers = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882390] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] barbican_service_user.timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882390] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.approle_role_id = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882390] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.approle_secret_id = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882564] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882564] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.certfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882622] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.collect_timing = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882836] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.insecure = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.882906] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.keyfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.883143] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.kv_mountpoint = secret {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.883351] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.kv_path = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.883529] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None 
None] vault.kv_version = 2 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.883744] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.namespace = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.883942] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.root_token_id = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.884132] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.split_loggers = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.884297] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.ssl_ca_crt_file = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.884460] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.884627] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.use_ssl = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.884801] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.884990] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.auth_section = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.885196] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.auth_type = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.885347] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.885508] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.certfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.885673] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.collect_timing = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.885835] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.connect_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.886030] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.connect_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
532.886216] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.endpoint_override = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.886384] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.insecure = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.886546] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.keyfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.886707] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.max_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.886866] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.min_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.887036] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.region_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.887201] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.retriable_status_codes = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.887371] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.service_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.887543] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.service_type = identity {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.887707] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.split_loggers = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.887867] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.status_code_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.888038] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.status_code_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.888207] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.888426] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.888594] env[62813]: DEBUG oslo_service.service [None 
req-ba245590-d042-4b74-9517-28cec0e8a05e None None] keystone.version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.888797] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.connection_uri = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.888960] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.cpu_mode = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.889166] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.cpu_model_extra_flags = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.889343] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.cpu_models = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.889515] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.cpu_power_governor_high = performance {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.889685] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.cpu_power_governor_low = powersave {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.889850] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.cpu_power_management = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.890040] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.890236] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.device_detach_attempts = 8 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.890423] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.device_detach_timeout = 20 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.890575] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.disk_cachemodes = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.890735] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.disk_prefix = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.890899] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.enabled_perf_events = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.891072] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] 
libvirt.file_backed_memory = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.891243] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.gid_maps = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.891403] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.hw_disk_discard = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.891562] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.hw_machine_type = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.891736] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.images_rbd_ceph_conf = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.891901] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.892074] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.892248] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.images_rbd_glance_store_name = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.892416] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.images_rbd_pool = rbd {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.892583] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.images_type = default {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.892742] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.images_volume_group = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.892972] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.inject_key = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.893100] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.inject_partition = -2 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.893271] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.inject_password = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.893435] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.iscsi_iface = None {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.893597] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.iser_use_multipath = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.893758] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.live_migration_bandwidth = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.893920] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.894117] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.live_migration_downtime = 500 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.894299] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.894463] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.894625] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.live_migration_inbound_addr = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.894786] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.894969] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.live_migration_permit_post_copy = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.895163] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.live_migration_scheme = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.895345] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.live_migration_timeout_action = abort {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.895512] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.live_migration_tunnelled = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.895671] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.live_migration_uri = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.895833] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] 
libvirt.live_migration_with_native_tls = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.896029] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.max_queues = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.896210] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.896446] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.896609] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.nfs_mount_options = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.896905] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.897088] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.897260] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.num_iser_scan_tries = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.897423] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.num_memory_encrypted_guests = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.897586] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.897747] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.num_pcie_ports = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.897912] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.num_volume_scan_tries = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.898117] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.pmem_namespaces = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.898300] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.quobyte_client_cfg = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.898591] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.898764] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.rbd_connect_timeout = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.898930] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.899106] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.899273] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.rbd_secret_uuid = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.899430] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.rbd_user = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.899592] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.899763] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.remote_filesystem_transport = ssh {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.899922] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.rescue_image_id = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.900094] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.rescue_kernel_id = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.900319] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.rescue_ramdisk_id = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.900512] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.900674] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.rx_queue_size = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.900846] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.smbfs_mount_options = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.901142] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.901316] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.snapshot_compression = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.901479] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.snapshot_image_format = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.901699] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.901864] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.sparse_logical_volumes = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.902045] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.swtpm_enabled = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.902245] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.swtpm_group = tss {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.902415] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.swtpm_user = tss {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.902583] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.sysinfo_serial = unique {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.902740] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.tb_cache_size = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.902897] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.tx_queue_size = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.903093] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.uid_maps = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.903265] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.use_virtio_for_bridges = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.903436] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.virt_type = kvm {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.903602] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.volume_clear = zero 
{{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.903763] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.volume_clear_size = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.903925] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.volume_use_multipath = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.904097] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.vzstorage_cache_path = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.904270] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.904439] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.vzstorage_mount_group = qemu {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.904604] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.vzstorage_mount_opts = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.904771] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.905078] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.905271] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.vzstorage_mount_user = stack {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.905444] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.905622] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.auth_section = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.905796] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.auth_type = password {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.905976] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.906191] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.certfile = None 
{{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.906366] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.collect_timing = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.906533] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.connect_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.906696] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.connect_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.906870] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.default_floating_pool = public {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.907044] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.endpoint_override = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.907215] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.extension_sync_interval = 600 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.907379] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.http_retries = 3 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.907541] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.insecure = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.907703] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.keyfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.907863] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.max_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.908043] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.908211] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.min_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.908380] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.ovs_bridge = br-int {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.908546] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.physnets = [] {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.908716] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.region_name = RegionOne {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.908879] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.retriable_status_codes = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.909060] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.service_metadata_proxy = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.909225] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.service_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.909398] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.service_type = network {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.909559] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.split_loggers = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.909719] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.status_code_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.909879] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.status_code_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.910063] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.910270] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.910436] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] neutron.version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.910610] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] notifications.bdms_in_notifications = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.910790] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] notifications.default_level = INFO {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.910967] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] notifications.notification_format = unversioned {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.911146] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] notifications.notify_on_state_change = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.911325] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.911501] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] pci.alias = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.911674] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] pci.device_spec = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.911839] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] pci.report_in_placement = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.912017] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.auth_section = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.912207] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.auth_type = password {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.912376] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.912539] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.912697] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.certfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.912858] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.collect_timing = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.913062] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.connect_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.913265] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.connect_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.913439] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.default_domain_id = None {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.913601] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.default_domain_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.913760] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.domain_id = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.913920] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.domain_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.914114] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.endpoint_override = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.914302] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.insecure = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.914467] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.keyfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.914626] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.max_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.914812] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.min_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.915031] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.password = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.915201] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.project_domain_id = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.915376] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.project_domain_name = Default {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.915545] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.project_id = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.915720] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.project_name = service {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.915889] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.region_name = RegionOne {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.916066] 
env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.retriable_status_codes = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.916235] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.service_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.916407] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.service_type = placement {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.916572] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.split_loggers = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.916738] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.status_code_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.916904] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.status_code_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.917080] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.system_scope = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.917247] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.917408] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.trust_id = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.917568] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.user_domain_id = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.917739] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.user_domain_name = Default {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.917901] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.user_id = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.918106] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.username = placement {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.918308] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.918473] env[62813]: DEBUG oslo_service.service [None 
req-ba245590-d042-4b74-9517-28cec0e8a05e None None] placement.version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.918653] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] quota.cores = 20 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.918823] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] quota.count_usage_from_placement = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.918997] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.919188] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] quota.injected_file_content_bytes = 10240 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.919356] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] quota.injected_file_path_length = 255 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.919521] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] quota.injected_files = 5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.919685] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] quota.instances = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.919849] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] quota.key_pairs = 100 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.920026] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] quota.metadata_items = 128 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.920200] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] quota.ram = 51200 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.920365] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] quota.recheck_quota = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.920533] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] quota.server_group_members = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.920699] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] quota.server_groups = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.920872] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.921047] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.921216] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] scheduler.image_metadata_prefilter = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.921377] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.921541] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] scheduler.max_attempts = 3 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.921702] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] scheduler.max_placement_results = 1000 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.921867] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.922048] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] scheduler.query_placement_for_image_type_support = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.922236] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.922416] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] scheduler.workers = 2 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.922597] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.922770] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.922962] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.923173] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.923347] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.923507] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.923670] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.923861] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.924043] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.host_subset_size = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.924219] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.924381] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.924548] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.924714] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.isolated_hosts = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.924877] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.isolated_images = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.925080] env[62813]: DEBUG oslo_service.service [None 
req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.925260] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.925430] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.925617] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.pci_in_placement = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.925751] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.925910] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.926104] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.926284] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.926451] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.926620] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.926783] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.track_instance_changes = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.926958] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.927146] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] metrics.required = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.927312] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] metrics.weight_multiplier = 1.0 
{{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.927475] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.927640] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] metrics.weight_setting = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.927954] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.928147] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] serial_console.enabled = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.928379] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] serial_console.port_range = 10000:20000 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.928651] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.928919] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.929156] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] serial_console.serialproxy_port = 6083 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.929334] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] service_user.auth_section = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.929509] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] service_user.auth_type = password {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.929671] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] service_user.cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.929832] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] service_user.certfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.929996] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] service_user.collect_timing = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.930209] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] service_user.insecure = False {{(pid=62813) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.930373] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] service_user.keyfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.930546] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] service_user.send_service_user_token = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.930711] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] service_user.split_loggers = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.930870] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] service_user.timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.931066] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] spice.agent_enabled = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.931237] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] spice.enabled = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.931553] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.931752] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.931924] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] spice.html5proxy_port = 6082 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.932102] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] spice.image_compression = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.932265] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] spice.jpeg_compression = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.932423] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] spice.playback_compression = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.932593] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] spice.server_listen = 127.0.0.1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.932760] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.932939] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] spice.streaming_mode = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.933132] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] spice.zlib_compression = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.933308] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] upgrade_levels.baseapi = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.933477] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] upgrade_levels.compute = auto {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.933638] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] upgrade_levels.conductor = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.933796] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] upgrade_levels.scheduler = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.933964] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vendordata_dynamic_auth.auth_section = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.934168] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vendordata_dynamic_auth.auth_type = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.934339] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vendordata_dynamic_auth.cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.934498] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vendordata_dynamic_auth.certfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.934663] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.934823] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vendordata_dynamic_auth.insecure = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.935030] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vendordata_dynamic_auth.keyfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.935204] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.935366] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vendordata_dynamic_auth.timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.935539] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.api_retry_count = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.935727] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.ca_file = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.935871] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.cache_prefix = devstack-image-cache {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.936071] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.cluster_name = testcl1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.936248] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.connection_pool_size = 10 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.936407] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.console_delay_seconds = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.936573] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.datastore_regex = ^datastore.* {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.936789] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.936960] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.host_password = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.937143] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.host_port = 443 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.937313] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.host_username = administrator@vsphere.local {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.937479] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.insecure = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.937643] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.integration_bridge = None {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.937804] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.maximum_objects = 100 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.937966] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.pbm_default_policy = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.938146] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.pbm_enabled = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.938310] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.pbm_wsdl_location = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.938480] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.938643] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.serial_port_proxy_uri = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.938801] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.serial_port_service_uri = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.938979] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.task_poll_interval = 0.5 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.939161] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.use_linked_clone = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.939333] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.vnc_keymap = en-us {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.939500] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.vnc_port = 5900 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.939663] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vmware.vnc_port_total = 10000 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.939848] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vnc.auth_schemes = ['none'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.940031] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vnc.enabled = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.940349] env[62813]: 
DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.940537] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.940712] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vnc.novncproxy_port = 6080 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.940890] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vnc.server_listen = 127.0.0.1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.941073] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.941242] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vnc.vencrypt_ca_certs = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.941403] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vnc.vencrypt_client_cert = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.941562] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vnc.vencrypt_client_key = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.941741] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.941906] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.disable_deep_image_inspection = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.942099] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.942281] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.942446] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.942611] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.disable_rootwrap = False {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.942774] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.enable_numa_live_migration = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.942944] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.943143] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.943312] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.943475] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.libvirt_disable_apic = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.943639] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.943802] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.943966] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.944146] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.944309] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.944468] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.944630] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.944789] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
532.944977] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.945164] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.945353] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.945522] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] wsgi.client_socket_timeout = 900 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.945691] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] wsgi.default_pool_size = 1000 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.945858] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] wsgi.keep_alive = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.946066] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] wsgi.max_header_line = 16384 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.946259] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] wsgi.secure_proxy_ssl_header = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.946425] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] wsgi.ssl_ca_file = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.946588] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] wsgi.ssl_cert_file = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.946753] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] wsgi.ssl_key_file = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.946920] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] wsgi.tcp_keepidle = 600 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.947116] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.947295] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] zvm.ca_file = None {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.947457] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] zvm.cloud_connector_url = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.947752] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.947927] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] zvm.reachable_timeout = 300 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.948126] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_policy.enforce_new_defaults = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.948299] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_policy.enforce_scope = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.948477] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_policy.policy_default_rule = default {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.948658] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.948833] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_policy.policy_file = policy.yaml {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.949012] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.949183] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.949344] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.949502] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.949664] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.949832] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.950013] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.950202] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] profiler.connection_string = messaging:// {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.950369] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] profiler.enabled = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.950539] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] profiler.es_doc_type = notification {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.950702] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] profiler.es_scroll_size = 10000 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.950868] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] profiler.es_scroll_time = 2m {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.951041] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] profiler.filter_error_trace = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.951219] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] profiler.hmac_keys = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.951389] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] profiler.sentinel_service_name = mymaster {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.951561] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] profiler.socket_timeout = 0.1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.951726] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] profiler.trace_requests = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.951888] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] profiler.trace_sqlalchemy = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.952084] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] profiler_jaeger.process_tags = {} {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.952306] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] 
profiler_jaeger.service_name_prefix = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.952493] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] profiler_otlp.service_name_prefix = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.952663] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] remote_debug.host = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.952822] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] remote_debug.port = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.953009] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.953184] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.953346] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.953509] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.953717] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.953828] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.953987] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.954156] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.954320] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.954491] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.954650] env[62813]: 
DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.954820] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.955040] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.955213] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.955388] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.955558] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.955722] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.955948] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.956121] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.956330] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.956502] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.956679] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.956840] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.957013] env[62813]: DEBUG oslo_service.service [None 
req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.957186] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.957357] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.957515] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.957674] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.957841] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.958014] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.ssl = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.958193] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.958361] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.958525] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.958694] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.958862] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.ssl_version = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.959034] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.959231] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.959400] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_notifications.retry = -1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.959585] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.959761] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_messaging_notifications.transport_url = **** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.959931] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.auth_section = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.960108] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.auth_type = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.960271] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.cafile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.960429] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.certfile = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.960591] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.collect_timing = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.960777] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.connect_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.960986] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.connect_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.961170] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.endpoint_id = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.961334] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.endpoint_override = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.961497] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.insecure = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.961656] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.keyfile = None {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.961818] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.max_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.961972] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.min_version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.962145] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.region_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.962304] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.retriable_status_codes = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.962461] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.service_name = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.962621] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.service_type = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.962782] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.split_loggers = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.962950] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.status_code_retries = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.963145] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.status_code_retry_delay = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.963307] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.timeout = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.963468] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.valid_interfaces = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.963625] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_limit.version = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.963842] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_reports.file_event_handler = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.963952] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62813) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.964126] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] oslo_reports.log_dir = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.964328] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.964496] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.964655] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.964823] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.965021] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.965200] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.965369] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.965527] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plug_ovs_privileged.group = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.965687] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.965853] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.966076] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.966184] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] vif_plug_ovs_privileged.user = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.966353] env[62813]: DEBUG oslo_service.service 
[None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_linux_bridge.flat_interface = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.966528] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.966699] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.966869] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.967059] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.967302] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.967475] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.967644] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.967834] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.968017] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_ovs.isolate_vif = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.968202] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.968371] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.968542] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.968715] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_ovs.ovsdb_interface = native {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
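The group.option = value lines above and below are oslo.config's option dump, written once at service start by ConfigOpts.log_opt_values(). As a rough standalone sketch of that mechanism (the option names and defaults are copied from the [os_vif_ovs] values logged above; in the running service they are registered by os-vif itself, not by hand):

import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger('os_vif_opts_sketch')

# Use a private ConfigOpts instance so this sketch does not collide with the
# options that os-vif already registers on the global cfg.CONF.
conf = cfg.ConfigOpts()
conf.register_opts(
    [
        cfg.StrOpt('ovsdb_connection', default='tcp:127.0.0.1:6640'),
        cfg.StrOpt('ovsdb_interface', default='native'),
        cfg.IntOpt('ovs_vsctl_timeout', default=120),
        cfg.IntOpt('network_device_mtu', default=1500),
    ],
    group='os_vif_ovs',
)

conf([])  # parse an empty argument list; the defaults above apply
# Emits one DEBUG line per registered option, e.g.
# "os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640", matching the dump here.
conf.log_opt_values(LOG, logging.DEBUG)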
[ 532.968880] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_vif_ovs.per_port_bridge = False {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.969064] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_brick.lock_path = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.969237] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.969401] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.969570] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] privsep_osbrick.capabilities = [21] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.969733] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] privsep_osbrick.group = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.969894] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] privsep_osbrick.helper_command = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.970071] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.970245] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.970402] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] privsep_osbrick.user = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.970577] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.970738] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] nova_sys_admin.group = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.970900] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] nova_sys_admin.helper_command = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.971079] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
532.971250] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.971418] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] nova_sys_admin.user = None {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 532.971629] env[62813]: DEBUG oslo_service.service [None req-ba245590-d042-4b74-9517-28cec0e8a05e None None] ******************************************************************************** {{(pid=62813) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 532.971977] env[62813]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 532.982203] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Getting list of instances from cluster (obj){ [ 532.982203] env[62813]: value = "domain-c8" [ 532.982203] env[62813]: _type = "ClusterComputeResource" [ 532.982203] env[62813]: } {{(pid=62813) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 532.983485] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6824ffdf-012c-4a46-aa52-bdad0d191f61 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.992937] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Got total of 0 instances {{(pid=62813) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 532.993587] env[62813]: WARNING nova.virt.vmwareapi.driver [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 532.994069] env[62813]: INFO nova.virt.node [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Generated node identity 49efdf20-78bc-435f-a902-9cc99ed395f2 [ 532.994305] env[62813]: INFO nova.virt.node [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Wrote node identity 49efdf20-78bc-435f-a902-9cc99ed395f2 to /opt/stack/data/n-cpu-1/compute_id [ 533.007123] env[62813]: WARNING nova.compute.manager [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Compute nodes ['49efdf20-78bc-435f-a902-9cc99ed395f2'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 533.044588] env[62813]: INFO nova.compute.manager [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 533.069325] env[62813]: WARNING nova.compute.manager [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
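The 'Acquiring lock ... by ...' / 'Lock ... acquired ... waited' / 'Lock ... "released" ... held' triplets, such as the "compute_resources" messages just below, are produced by oslo.concurrency's lockutils wrapper around the decorated method. A minimal sketch of that pattern, with an illustrative class name rather than the real Nova resource tracker:

from oslo_concurrency import lockutils


class ResourceTrackerSketch(object):
    """Illustrative stand-in; not nova.compute.resource_tracker.ResourceTracker."""

    @lockutils.synchronized('compute_resources')
    def update_available_resource(self):
        # Runs while holding the named in-process lock; lockutils' wrapper is
        # what logs the 'Acquiring lock', 'acquired ... waited Ns' and
        # '"released" ... held Ns' DEBUG messages seen throughout this log.
        pass


ResourceTrackerSketch().update_available_resource()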
[ 533.069575] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.069777] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.069926] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.070125] env[62813]: DEBUG nova.compute.resource_tracker [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 533.071231] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4f295f-57f8-41d9-b722-291f15cfb70c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.079971] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e65baf4-0ff7-45d3-adde-5e1453ae695b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.094674] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-015d7569-d7b6-4156-959b-510b9188f0c2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.101588] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a293cc8c-bdcd-4a3b-a64b-593e3f38e64d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.132425] env[62813]: DEBUG nova.compute.resource_tracker [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180784MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 533.132588] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.132766] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.146738] env[62813]: WARNING 
nova.compute.resource_tracker [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] No compute node record for cpu-1:49efdf20-78bc-435f-a902-9cc99ed395f2: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 49efdf20-78bc-435f-a902-9cc99ed395f2 could not be found. [ 533.163094] env[62813]: INFO nova.compute.resource_tracker [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 49efdf20-78bc-435f-a902-9cc99ed395f2 [ 533.220745] env[62813]: DEBUG nova.compute.resource_tracker [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 533.221049] env[62813]: DEBUG nova.compute.resource_tracker [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=225GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 533.336617] env[62813]: INFO nova.scheduler.client.report [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] [req-45008453-1d8d-4449-ade2-37f77dabcf14] Created resource provider record via placement API for resource provider with UUID 49efdf20-78bc-435f-a902-9cc99ed395f2 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 533.354721] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a32863-63a2-40c6-912d-00a93bfeff02 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.363020] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f4f70c-472e-45a3-8fe8-8f2ddbbc7441 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.393523] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b78426-c865-439d-97a9-3cba89da26bc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.401774] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1134649-ce57-4d84-ad67-a6651cb2cf44 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.416131] env[62813]: DEBUG nova.compute.provider_tree [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Updating inventory in ProviderTree for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 533.454769] env[62813]: DEBUG nova.scheduler.client.report [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Updated inventory for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 533.455099] env[62813]: DEBUG nova.compute.provider_tree [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Updating resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2 generation from 0 to 1 during operation: update_inventory {{(pid=62813) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 533.455258] env[62813]: DEBUG nova.compute.provider_tree [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Updating inventory in ProviderTree for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 533.504835] env[62813]: DEBUG nova.compute.provider_tree [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Updating resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2 generation from 1 to 2 during operation: update_traits {{(pid=62813) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 533.524019] env[62813]: DEBUG nova.compute.resource_tracker [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 533.524316] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.391s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.524520] env[62813]: DEBUG nova.service [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Creating RPC server for service compute {{(pid=62813) start /opt/stack/nova/nova/service.py:182}} [ 533.537797] env[62813]: DEBUG nova.service [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] Join ServiceGroup membership for this service compute {{(pid=62813) start /opt/stack/nova/nova/service.py:199}} [ 533.538058] env[62813]: DEBUG nova.servicegroup.drivers.db [None req-e0786a31-e12b-4941-9bc5-339843b02a8e None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62813) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 542.807711] env[62813]: DEBUG dbcounter [-] [62813] Writing DB stats nova_cell0:SELECT=1 {{(pid=62813) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 542.808546] env[62813]: DEBUG dbcounter [-] [62813] Writing DB stats nova_cell1:SELECT=1 {{(pid=62813) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 547.540190] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] 
Running periodic task ComputeManager._sync_power_states {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 547.553057] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Getting list of instances from cluster (obj){ [ 547.553057] env[62813]: value = "domain-c8" [ 547.553057] env[62813]: _type = "ClusterComputeResource" [ 547.553057] env[62813]: } {{(pid=62813) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 547.554573] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34ae114-f2db-4922-b26d-3b7602e6a9f7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.563752] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Got total of 0 instances {{(pid=62813) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 547.564431] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 547.564520] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Getting list of instances from cluster (obj){ [ 547.564520] env[62813]: value = "domain-c8" [ 547.564520] env[62813]: _type = "ClusterComputeResource" [ 547.564520] env[62813]: } {{(pid=62813) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 547.565356] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932a1bec-ecd4-4e3c-a4e5-af9601ada6e0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.573437] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Got total of 0 instances {{(pid=62813) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 577.826966] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Acquiring lock "6ff9b201-b6a2-45a9-bb6b-0acb8c82405f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.827677] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Lock "6ff9b201-b6a2-45a9-bb6b-0acb8c82405f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.847378] env[62813]: DEBUG nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 578.022931] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.026599] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.003s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.029987] env[62813]: INFO nova.compute.claims [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 578.240225] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875760ea-cdb6-49ce-b1fd-6d8bc6bba128 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.250250] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33383648-6a1d-4406-a417-56a475d6b412 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.292185] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af0da93-e37c-45b9-9b6a-cd4591ed2d65 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.303733] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c0bd03-90aa-4cbe-9a09-d28a058a8db4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.324234] env[62813]: DEBUG nova.compute.provider_tree [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.346958] env[62813]: DEBUG nova.scheduler.client.report [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 578.382434] env[62813]: DEBUG 
oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.356s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.383682] env[62813]: DEBUG nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 578.462487] env[62813]: DEBUG nova.compute.utils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 578.465726] env[62813]: DEBUG nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 578.465726] env[62813]: DEBUG nova.network.neutron [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 578.504716] env[62813]: DEBUG nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 578.622557] env[62813]: DEBUG nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 578.688551] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Acquiring lock "022718e2-52ec-4130-81b0-fb39e57d6efe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.691897] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Lock "022718e2-52ec-4130-81b0-fb39e57d6efe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.692773] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Acquiring lock "c56debdf-ab77-4151-bc20-6973ae594d87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.693020] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Lock "c56debdf-ab77-4151-bc20-6973ae594d87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.709170] env[62813]: DEBUG nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 578.712422] env[62813]: DEBUG nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 578.788576] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.788835] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.790397] env[62813]: INFO nova.compute.claims [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 578.797409] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.953833] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c445fa79-b9b3-4f2c-88dc-26575460b648 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.963795] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5dac0ef-e580-4b05-aaaa-c35b89801c1c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.997426] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6507b77c-2284-4530-bec5-e49c36fe6e23 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.007415] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5eb6a2-4719-4810-a447-4bfdb79545ca {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.021400] env[62813]: DEBUG nova.compute.provider_tree [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 579.037692] env[62813]: DEBUG nova.scheduler.client.report [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 579.082173] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.292s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.082173] env[62813]: DEBUG nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 579.087371] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.289s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.087371] env[62813]: INFO nova.compute.claims [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.162069] env[62813]: DEBUG nova.compute.utils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 579.167801] env[62813]: DEBUG nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 579.167801] env[62813]: DEBUG nova.network.neutron [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 579.189353] env[62813]: DEBUG nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Start building block device mappings for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 579.322030] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d956ec7-d46f-4b4f-a444-7b7af4eb3686 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.337907] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5c369f-1ea2-4e9a-a04f-2430e6fb0160 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.344759] env[62813]: DEBUG nova.virt.hardware [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 579.346277] env[62813]: DEBUG nova.virt.hardware [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 579.346277] env[62813]: DEBUG nova.virt.hardware [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 579.346277] env[62813]: DEBUG nova.virt.hardware [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 579.346277] env[62813]: DEBUG nova.virt.hardware [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 579.346277] env[62813]: DEBUG nova.virt.hardware [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 579.346842] env[62813]: DEBUG nova.virt.hardware [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 579.346842] env[62813]: DEBUG nova.virt.hardware [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 579.347118] env[62813]: DEBUG nova.virt.hardware [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 579.347224] env[62813]: DEBUG nova.virt.hardware [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 579.348127] env[62813]: DEBUG nova.virt.hardware [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 579.348919] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e66534b-45a4-4763-91df-bb1766d2816e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.356673] env[62813]: DEBUG nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 579.397724] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15487db7-6744-45b8-b61e-0eba77c516c0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.416622] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14b4623-7960-4725-b603-26c92d7f2043 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.424328] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac77b1a-0ec5-443f-85b2-a71055efbd31 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.443994] env[62813]: DEBUG nova.virt.hardware [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 579.444387] env[62813]: DEBUG nova.virt.hardware [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 579.444552] env[62813]: DEBUG nova.virt.hardware [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 579.444592] env[62813]: DEBUG nova.virt.hardware [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 579.444728] env[62813]: DEBUG nova.virt.hardware [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 579.444883] env[62813]: DEBUG nova.virt.hardware [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 579.445104] 
env[62813]: DEBUG nova.virt.hardware [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 579.445321] env[62813]: DEBUG nova.virt.hardware [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 579.445432] env[62813]: DEBUG nova.virt.hardware [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 579.445621] env[62813]: DEBUG nova.virt.hardware [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 579.445814] env[62813]: DEBUG nova.virt.hardware [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 579.456506] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b3bf2a-08eb-4931-935f-79466e7c078f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.468208] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a707a139-e72a-48c3-ad44-4a364c4b5638 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.471298] env[62813]: DEBUG nova.compute.provider_tree [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 579.484076] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75059157-39b3-47da-8e49-75ba61ef4c01 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.489457] env[62813]: DEBUG nova.scheduler.client.report [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 579.513283] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.429s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.513818] env[62813]: DEBUG nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 579.589305] env[62813]: DEBUG nova.compute.utils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 579.590712] env[62813]: DEBUG nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 579.590914] env[62813]: DEBUG nova.network.neutron [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 579.623189] env[62813]: DEBUG nova.policy [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2a8cfec3c9e84d2296bcf71c3f940034', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c79932ab21cd49709e2849be68f5a756', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 579.626889] env[62813]: DEBUG nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 579.740117] env[62813]: DEBUG nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 579.770488] env[62813]: DEBUG nova.virt.hardware [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 579.770684] env[62813]: DEBUG nova.virt.hardware [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 579.770841] env[62813]: DEBUG nova.virt.hardware [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 579.771165] env[62813]: DEBUG nova.virt.hardware [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 579.771351] env[62813]: DEBUG nova.virt.hardware [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 579.771501] env[62813]: DEBUG nova.virt.hardware [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 579.771709] env[62813]: DEBUG nova.virt.hardware [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 579.771864] env[62813]: DEBUG nova.virt.hardware [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 579.772043] env[62813]: DEBUG 
nova.virt.hardware [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 579.772209] env[62813]: DEBUG nova.virt.hardware [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 579.772378] env[62813]: DEBUG nova.virt.hardware [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 579.773406] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9905d6-20e1-4800-982c-636d078139a2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.783755] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a0569b-cf0f-417f-99f8-c70d89b20a5e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.153035] env[62813]: DEBUG nova.policy [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2bc5d4992a1342f3a57b6ac3eec643d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '665c33cce8f84ef5964bc8363adedf27', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 580.178954] env[62813]: DEBUG nova.policy [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd46585978a464f50a2b2e5a7cd52b8cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc5ce663d1234eb1b110a7151424ca0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 581.472223] env[62813]: DEBUG nova.network.neutron [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Successfully created port: 1015057f-b5b7-41d3-be38-da52ad7703d2 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 582.483942] env[62813]: DEBUG nova.network.neutron [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 
tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Successfully created port: 2f2bfdef-9856-4f14-b739-bc926edc8373 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 582.498400] env[62813]: DEBUG nova.network.neutron [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Successfully created port: 6c5c1952-4bf4-4118-9407-f69a0173b959 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 582.928228] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Acquiring lock "4e911f12-5d9c-4713-b1e0-4d87a589a9d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.928531] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Lock "4e911f12-5d9c-4713-b1e0-4d87a589a9d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.956242] env[62813]: DEBUG nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 583.055325] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.057036] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.059713] env[62813]: INFO nova.compute.claims [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 583.265486] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53eebc23-b314-4f7d-95b7-55bf264308f7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.280028] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901163e4-6fc6-4aad-a353-b4b7fa1725a8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.316367] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190e7661-35ec-473c-8578-cb81a492d441 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.327998] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d7fd92-fd35-4836-adc6-5fa0e8eaba75 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.342923] env[62813]: DEBUG nova.compute.provider_tree [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.366045] env[62813]: DEBUG nova.scheduler.client.report [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 583.398785] env[62813]: DEBUG oslo_concurrency.lockutils [None 
req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.343s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.401094] env[62813]: DEBUG nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 583.493152] env[62813]: DEBUG nova.compute.utils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 583.498357] env[62813]: DEBUG nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 583.498626] env[62813]: DEBUG nova.network.neutron [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 583.526635] env[62813]: DEBUG nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 583.627212] env[62813]: DEBUG nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 583.669646] env[62813]: DEBUG nova.virt.hardware [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 583.669907] env[62813]: DEBUG nova.virt.hardware [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 583.671454] env[62813]: DEBUG nova.virt.hardware [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 583.671752] env[62813]: DEBUG nova.virt.hardware [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 583.671914] env[62813]: DEBUG nova.virt.hardware [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 583.672080] env[62813]: DEBUG nova.virt.hardware [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 583.672303] env[62813]: DEBUG nova.virt.hardware [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 583.672483] env[62813]: DEBUG nova.virt.hardware [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 583.672702] env[62813]: DEBUG 
nova.virt.hardware [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 583.673613] env[62813]: DEBUG nova.virt.hardware [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 583.673859] env[62813]: DEBUG nova.virt.hardware [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 583.675055] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c84c0d-06f0-46ba-95c0-e29bcd72e043 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.690376] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d958f460-8712-4fc3-95d6-035d7d50f88d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.857211] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "65d620ad-0863-4947-945e-0e4b3c01d3a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.858037] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "65d620ad-0863-4947-945e-0e4b3c01d3a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.878031] env[62813]: DEBUG nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 583.974641] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.974817] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.976408] env[62813]: INFO nova.compute.claims [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 583.999568] env[62813]: DEBUG nova.network.neutron [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Successfully updated port: 1015057f-b5b7-41d3-be38-da52ad7703d2 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 584.022707] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Acquiring lock "refresh_cache-6ff9b201-b6a2-45a9-bb6b-0acb8c82405f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.022853] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Acquired lock "refresh_cache-6ff9b201-b6a2-45a9-bb6b-0acb8c82405f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.023034] env[62813]: DEBUG nova.network.neutron [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 584.099319] env[62813]: DEBUG nova.policy [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a2d9664d15b4851b1f9d30f3babbed5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '95166f4fd7764311b2469680d871786e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 584.285352] env[62813]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0edfa7fe-de48-4d5c-975d-165601f1391f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.297464] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d32f78-7f8f-41fa-894f-c3f177551706 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.338329] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b915792c-ab2e-4dfa-8c5a-0912c17cd6ba {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.349295] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e18a463-4ae2-43f5-9bcd-2461c8e54f6c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.368994] env[62813]: DEBUG nova.compute.provider_tree [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.390457] env[62813]: DEBUG nova.scheduler.client.report [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 584.397516] env[62813]: DEBUG nova.network.neutron [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 584.420251] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.445s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.420772] env[62813]: DEBUG nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Start building networks asynchronously for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 584.482228] env[62813]: DEBUG nova.compute.utils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 584.485014] env[62813]: DEBUG nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 584.485014] env[62813]: DEBUG nova.network.neutron [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 584.510668] env[62813]: DEBUG nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 584.619883] env[62813]: DEBUG nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 584.659800] env[62813]: DEBUG nova.virt.hardware [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 584.660123] env[62813]: DEBUG nova.virt.hardware [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 584.660353] env[62813]: DEBUG nova.virt.hardware [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 584.660459] env[62813]: 
DEBUG nova.virt.hardware [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 584.660602] env[62813]: DEBUG nova.virt.hardware [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 584.660775] env[62813]: DEBUG nova.virt.hardware [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 584.660986] env[62813]: DEBUG nova.virt.hardware [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 584.661855] env[62813]: DEBUG nova.virt.hardware [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 584.662053] env[62813]: DEBUG nova.virt.hardware [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 584.662229] env[62813]: DEBUG nova.virt.hardware [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 584.662408] env[62813]: DEBUG nova.virt.hardware [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 584.663683] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b23d78-a5cc-4418-9f27-de740fd0f0f6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.674549] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f2acb7-6170-423c-81bc-5dd94801d1a8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.900087] env[62813]: DEBUG nova.policy [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8825dc7a3d6c457883432fb38cc0a83b', 'user_domain_id': 
'default', 'system_scope': None, 'domain_id': None, 'project_id': '09c8790d652a464fb2622aec202522ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 585.419530] env[62813]: DEBUG nova.network.neutron [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Updating instance_info_cache with network_info: [{"id": "1015057f-b5b7-41d3-be38-da52ad7703d2", "address": "fa:16:3e:ed:98:fa", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.81", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1015057f-b5", "ovs_interfaceid": "1015057f-b5b7-41d3-be38-da52ad7703d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.443409] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Releasing lock "refresh_cache-6ff9b201-b6a2-45a9-bb6b-0acb8c82405f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.444493] env[62813]: DEBUG nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Instance network_info: |[{"id": "1015057f-b5b7-41d3-be38-da52ad7703d2", "address": "fa:16:3e:ed:98:fa", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.81", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1015057f-b5", "ovs_interfaceid": 
"1015057f-b5b7-41d3-be38-da52ad7703d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 585.445559] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:98:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1015057f-b5b7-41d3-be38-da52ad7703d2', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 585.463292] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 585.463990] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-508c20aa-4e01-448e-8e05-eafb707bd2b9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.481598] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Created folder: OpenStack in parent group-v4. [ 585.481598] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Creating folder: Project (c79932ab21cd49709e2849be68f5a756). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 585.481598] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2c96d91-d633-4d90-99b1-bfd400026fa1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.495372] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Created folder: Project (c79932ab21cd49709e2849be68f5a756) in parent group-v840812. [ 585.495372] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Creating folder: Instances. Parent ref: group-v840813. 
{{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 585.495372] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14dd2a24-612c-4e85-a0fa-11bc7378c23d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.509288] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Created folder: Instances in parent group-v840813. [ 585.509564] env[62813]: DEBUG oslo.service.loopingcall [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 585.510185] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 585.510438] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-155a8583-4f41-4ba1-ae16-7fa1c2736842 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.534949] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 585.534949] env[62813]: value = "task-4267561" [ 585.534949] env[62813]: _type = "Task" [ 585.534949] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.548447] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267561, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.048075] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267561, 'name': CreateVM_Task, 'duration_secs': 0.500452} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.048075] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 586.081753] env[62813]: DEBUG oslo_vmware.service [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58727b4-8f47-4979-a6f6-c85e301d2e30 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.090621] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.092445] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.092884] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 586.093455] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a720d5ca-529e-42f7-800c-b0e2ccb98e1c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.101586] env[62813]: DEBUG oslo_vmware.api [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Waiting for the task: (returnval){ [ 586.101586] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52cbfff0-f926-796b-e31e-c00d1387a49a" [ 586.101586] env[62813]: _type = "Task" [ 586.101586] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.111434] env[62813]: DEBUG oslo_vmware.api [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52cbfff0-f926-796b-e31e-c00d1387a49a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.617623] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.617924] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 586.618484] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.618484] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.618749] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 586.619133] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20799c42-6210-451c-b789-4a886cdd0bf9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.642028] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 586.642028] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 586.642028] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babfae9a-414c-487e-b225-08070463a014 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.650149] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c231b12a-f889-4da8-adda-9bfb3eabd5c2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.659867] env[62813]: DEBUG oslo_vmware.api [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Waiting for the task: (returnval){ [ 586.659867] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52be4099-84b2-be4b-fa30-4594048861ef" [ 586.659867] env[62813]: _type = "Task" [ 586.659867] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.671653] env[62813]: DEBUG oslo_vmware.api [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52be4099-84b2-be4b-fa30-4594048861ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.170618] env[62813]: DEBUG nova.network.neutron [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Successfully updated port: 2f2bfdef-9856-4f14-b739-bc926edc8373 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 587.181339] env[62813]: DEBUG nova.network.neutron [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Successfully updated port: 6c5c1952-4bf4-4118-9407-f69a0173b959 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 587.184534] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 587.185135] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Creating directory with path [datastore2] vmware_temp/1e66b690-c849-4b18-93e7-8ebc6139e033/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 587.185917] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba5124bb-0bd8-4510-82ae-5382967e126c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.193223] env[62813]: DEBUG nova.network.neutron [None req-657d982d-27a4-4023-a12a-10bba4584243 
tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Successfully created port: 777e6f2e-8681-47b5-8ee6-025229e6edff {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 587.197994] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Acquiring lock "refresh_cache-c56debdf-ab77-4151-bc20-6973ae594d87" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.198219] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Acquired lock "refresh_cache-c56debdf-ab77-4151-bc20-6973ae594d87" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.198374] env[62813]: DEBUG nova.network.neutron [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 587.199622] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Acquiring lock "refresh_cache-022718e2-52ec-4130-81b0-fb39e57d6efe" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.199848] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Acquired lock "refresh_cache-022718e2-52ec-4130-81b0-fb39e57d6efe" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.199956] env[62813]: DEBUG nova.network.neutron [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 587.222554] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Created directory with path [datastore2] vmware_temp/1e66b690-c849-4b18-93e7-8ebc6139e033/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 587.224026] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Fetch image to [datastore2] vmware_temp/1e66b690-c849-4b18-93e7-8ebc6139e033/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 587.224026] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 
tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/1e66b690-c849-4b18-93e7-8ebc6139e033/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 587.224482] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f321beb4-b55c-401a-9e5f-3c9e02ad3b31 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.235123] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70cb0f45-910c-4dea-8920-a9340c3e876f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.252021] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49633056-4dfe-4f1e-9995-0fcb8b99d1a5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.293947] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d517810d-6271-4ccf-8f6d-a96625eeb2d8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.301711] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-97e32d9c-6cf3-4ffb-9a42-e18a7b0d99df {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.409738] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 587.543503] env[62813]: DEBUG oslo_vmware.rw_handles [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1e66b690-c849-4b18-93e7-8ebc6139e033/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 587.607638] env[62813]: DEBUG nova.network.neutron [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Successfully created port: ef72a6d3-945e-47c8-b221-b21b479497d7 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 587.611638] env[62813]: DEBUG oslo_vmware.rw_handles [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Completed reading data from the image iterator. 
{{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 587.611931] env[62813]: DEBUG oslo_vmware.rw_handles [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1e66b690-c849-4b18-93e7-8ebc6139e033/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 587.663389] env[62813]: DEBUG nova.network.neutron [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 587.781810] env[62813]: DEBUG nova.network.neutron [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 588.479059] env[62813]: DEBUG nova.compute.manager [req-52688d31-d1cf-493f-9537-4ecd554c860a req-daf1123e-d87a-4e7e-b9c9-e5cc5aef541c service nova] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Received event network-vif-plugged-1015057f-b5b7-41d3-be38-da52ad7703d2 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 588.479368] env[62813]: DEBUG oslo_concurrency.lockutils [req-52688d31-d1cf-493f-9537-4ecd554c860a req-daf1123e-d87a-4e7e-b9c9-e5cc5aef541c service nova] Acquiring lock "6ff9b201-b6a2-45a9-bb6b-0acb8c82405f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.479563] env[62813]: DEBUG oslo_concurrency.lockutils [req-52688d31-d1cf-493f-9537-4ecd554c860a req-daf1123e-d87a-4e7e-b9c9-e5cc5aef541c service nova] Lock "6ff9b201-b6a2-45a9-bb6b-0acb8c82405f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.479745] env[62813]: DEBUG oslo_concurrency.lockutils [req-52688d31-d1cf-493f-9537-4ecd554c860a req-daf1123e-d87a-4e7e-b9c9-e5cc5aef541c service nova] Lock "6ff9b201-b6a2-45a9-bb6b-0acb8c82405f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.481413] env[62813]: DEBUG nova.compute.manager [req-52688d31-d1cf-493f-9537-4ecd554c860a req-daf1123e-d87a-4e7e-b9c9-e5cc5aef541c service nova] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] No waiting events found dispatching network-vif-plugged-1015057f-b5b7-41d3-be38-da52ad7703d2 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 588.481788] env[62813]: WARNING nova.compute.manager [req-52688d31-d1cf-493f-9537-4ecd554c860a req-daf1123e-d87a-4e7e-b9c9-e5cc5aef541c service nova] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Received unexpected event 
network-vif-plugged-1015057f-b5b7-41d3-be38-da52ad7703d2 for instance with vm_state building and task_state spawning. [ 589.029471] env[62813]: DEBUG nova.network.neutron [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Updating instance_info_cache with network_info: [{"id": "2f2bfdef-9856-4f14-b739-bc926edc8373", "address": "fa:16:3e:58:61:5e", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f2bfdef-98", "ovs_interfaceid": "2f2bfdef-9856-4f14-b739-bc926edc8373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.046378] env[62813]: DEBUG nova.network.neutron [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Updating instance_info_cache with network_info: [{"id": "6c5c1952-4bf4-4118-9407-f69a0173b959", "address": "fa:16:3e:e7:7b:43", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.75", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c5c1952-4b", "ovs_interfaceid": "6c5c1952-4bf4-4118-9407-f69a0173b959", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.054607] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Releasing lock "refresh_cache-c56debdf-ab77-4151-bc20-6973ae594d87" {{(pid=62813) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.054607] env[62813]: DEBUG nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Instance network_info: |[{"id": "2f2bfdef-9856-4f14-b739-bc926edc8373", "address": "fa:16:3e:58:61:5e", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f2bfdef-98", "ovs_interfaceid": "2f2bfdef-9856-4f14-b739-bc926edc8373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 589.054913] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:61:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2f2bfdef-9856-4f14-b739-bc926edc8373', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 589.071922] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Creating folder: Project (665c33cce8f84ef5964bc8363adedf27). Parent ref: group-v840812. 
{{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 589.072041] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-158ce930-bf4c-480a-8abb-b27f90344e7e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.078287] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Releasing lock "refresh_cache-022718e2-52ec-4130-81b0-fb39e57d6efe" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.078287] env[62813]: DEBUG nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Instance network_info: |[{"id": "6c5c1952-4bf4-4118-9407-f69a0173b959", "address": "fa:16:3e:e7:7b:43", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.75", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c5c1952-4b", "ovs_interfaceid": "6c5c1952-4bf4-4118-9407-f69a0173b959", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 589.078412] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:7b:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c5c1952-4bf4-4118-9407-f69a0173b959', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 589.089378] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Creating folder: Project (dc5ce663d1234eb1b110a7151424ca0c). Parent ref: group-v840812. 
{{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 589.090241] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef66c89c-c789-42a4-80aa-6a8a653318e9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.102081] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Created folder: Project (665c33cce8f84ef5964bc8363adedf27) in parent group-v840812. [ 589.102081] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Creating folder: Instances. Parent ref: group-v840816. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 589.102081] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3fa9fa2-28e8-4b41-ac93-2ff4641ff22c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.106331] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Created folder: Project (dc5ce663d1234eb1b110a7151424ca0c) in parent group-v840812. [ 589.106519] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Creating folder: Instances. Parent ref: group-v840817. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 589.108121] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c5374ee-dc5f-4e7f-a3ba-02b88bca7042 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.116421] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Created folder: Instances in parent group-v840816. [ 589.117399] env[62813]: DEBUG oslo.service.loopingcall [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 589.117399] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 589.117399] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-563136bf-fe2e-456d-92cf-05b51cbe7529 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.133468] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Created folder: Instances in parent group-v840817. 
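The Folder.CreateFolder and Folder.CreateVM_Task records above are issued through the oslo.vmware session layer referenced in these paths (oslo_vmware/service.py, oslo_vmware/api.py): a vSphere SOAP method is invoked against the vim service, and the wait_for_task/_poll_task records that follow poll the returned Task object until vCenter reports completion. The sketch below is only a minimal illustration of that invoke/wait pattern, not Nova's vm_util code; the helper names and the folder/resource-pool/config references it takes are assumed placeholders, not values from this deployment.

    # Minimal sketch of the oslo.vmware invoke/wait pattern visible in the
    # surrounding records (Folder.CreateFolder, then CreateVM_Task + polling).
    # Illustration only: helper names and managed-object refs are placeholders.
    from oslo_vmware import api

    def make_session(host, user, password):
        # VMwareAPISession logs in (the SessionManager.Login record earlier in
        # this log) and keeps the session alive; api_retry_count and
        # task_poll_interval mirror the retry/poll behaviour seen in _poll_task.
        return api.VMwareAPISession(host, user, password,
                                    api_retry_count=10,
                                    task_poll_interval=0.5)

    def create_instance_vm(session, parent_folder_ref, res_pool_ref, config_spec):
        # Create the per-project "Instances" folder (Folder.CreateFolder).
        folder_ref = session.invoke_api(session.vim, 'CreateFolder',
                                        parent_folder_ref, name='Instances')
        # Start CreateVM_Task in that folder and block until vCenter marks the
        # task complete; wait_for_task() polls it and raises on error states.
        task_ref = session.invoke_api(session.vim, 'CreateVM_Task',
                                      folder_ref, config=config_spec,
                                      pool=res_pool_ref)
        return session.wait_for_task(task_ref)

The progress percentages logged for task-4267566 and task-4267567 below come from that same polling loop, which re-reads the task's info until its state is success or error.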
[ 589.133733] env[62813]: DEBUG oslo.service.loopingcall [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 589.134380] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 589.134609] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9324581-141a-4585-be5f-85dd8dbcc6a8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.150399] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 589.150399] env[62813]: value = "task-4267566" [ 589.150399] env[62813]: _type = "Task" [ 589.150399] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.155870] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 589.155870] env[62813]: value = "task-4267567" [ 589.155870] env[62813]: _type = "Task" [ 589.155870] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.163262] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267566, 'name': CreateVM_Task} progress is 6%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.175343] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 589.175343] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267567, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.175343] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 589.175343] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 589.175343] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 589.216025] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 589.216025] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 589.216025] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 589.216025] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 589.216025] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 589.216323] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 589.216323] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 589.216490] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 589.216796] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 589.217196] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 589.218841] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 589.218841] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 589.218841] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 589.218841] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 589.239188] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.239571] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.239571] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.239923] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 589.241166] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004e1360-65b2-46d3-bdb1-2f0661e7e351 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.253258] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6cb34d-d9b8-409a-bc38-c23fcb47866c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.270183] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2436b56-298f-4da0-8f37-161745904d0c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.278689] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53dd293a-7e64-4f84-a82f-629222508149 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.311842] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180774MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 589.311842] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.312121] 
env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.391488] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 589.392047] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c56debdf-ab77-4151-bc20-6973ae594d87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 589.392047] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 022718e2-52ec-4130-81b0-fb39e57d6efe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 589.392047] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4e911f12-5d9c-4713-b1e0-4d87a589a9d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 589.392204] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 65d620ad-0863-4947-945e-0e4b3c01d3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 589.392434] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 589.392585] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=225GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 589.509978] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bb61b0-96d7-47f5-ac9a-b37327396187 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.523951] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3757b12-c043-4d7f-9a8d-3e70bc4dbb42 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.563495] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e57686-c739-4287-a7f5-b22a03cd59b6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.572732] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ad1f65-1a03-4ce0-b665-f5918b74245d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.588234] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.609964] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 589.641362] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 589.641362] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.328s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.675121] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267567, 
'name': CreateVM_Task, 'duration_secs': 0.402989} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.676833] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267566, 'name': CreateVM_Task} progress is 99%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.676833] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 589.677155] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.677395] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.680528] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 589.680528] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42d04925-35d5-45ec-9023-a9164c096630 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.690188] env[62813]: DEBUG nova.network.neutron [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Successfully created port: 49eb0829-6b71-461f-b57e-c7c15ecbd523 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 589.694507] env[62813]: DEBUG oslo_vmware.api [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Waiting for the task: (returnval){ [ 589.694507] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52ed05ea-8b5b-e0f9-f1ff-38fe5fa53a48" [ 589.694507] env[62813]: _type = "Task" [ 589.694507] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.709919] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.710055] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 589.710701] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.057574] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Acquiring lock "7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.057935] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Lock "7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.076818] env[62813]: DEBUG nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 590.169141] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267566, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.181148] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.182339] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.184383] env[62813]: INFO nova.compute.claims [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.377325] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b2b046-e089-4d38-8243-0fee8ec28a4f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.387463] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0885dffa-8344-4c3e-b013-c92b50b23286 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.427178] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2baffe-415e-45ef-b193-01f14d4013db {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.436762] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e5ff50-e583-4fbf-bf24-6ebb88e6158d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.454012] env[62813]: DEBUG nova.compute.provider_tree [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.465047] env[62813]: DEBUG nova.scheduler.client.report [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
590.500145] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.316s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.500145] env[62813]: DEBUG nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 590.555533] env[62813]: DEBUG nova.compute.utils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 590.557148] env[62813]: DEBUG nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 590.557326] env[62813]: DEBUG nova.network.neutron [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 590.574201] env[62813]: DEBUG nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 590.665478] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267566, 'name': CreateVM_Task, 'duration_secs': 1.352617} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.665894] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 590.666727] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.666947] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.667463] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 590.667793] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d10da560-fa2d-433a-85b4-37ad7e3065f2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.674755] env[62813]: DEBUG oslo_vmware.api [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Waiting for the task: (returnval){ [ 590.674755] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]520a3e35-1633-fe63-4f61-5bf148197274" [ 590.674755] env[62813]: _type = "Task" [ 590.674755] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.694469] env[62813]: DEBUG oslo_vmware.api [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]520a3e35-1633-fe63-4f61-5bf148197274, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.714997] env[62813]: DEBUG nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 590.752696] env[62813]: DEBUG nova.virt.hardware [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 590.752949] env[62813]: DEBUG nova.virt.hardware [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 590.753119] env[62813]: DEBUG nova.virt.hardware [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 590.753300] env[62813]: DEBUG nova.virt.hardware [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 590.753504] env[62813]: DEBUG nova.virt.hardware [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 590.753699] env[62813]: DEBUG nova.virt.hardware [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 590.753944] env[62813]: DEBUG nova.virt.hardware [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 590.754220] env[62813]: DEBUG nova.virt.hardware [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 590.754348] env[62813]: DEBUG nova.virt.hardware [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 590.754516] env[62813]: DEBUG nova.virt.hardware [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 590.754690] env[62813]: DEBUG nova.virt.hardware [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 590.755966] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bff599a-239b-4482-8f71-0f22931dedec {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.765285] env[62813]: DEBUG nova.policy [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3ee8ef20e0e43d18f3c097db0244117', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf1206472d7348938ff7fd4ac59296c0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 590.767966] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d382c9e4-86b1-454b-9064-32ef7b6c2035 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.919671] env[62813]: DEBUG nova.network.neutron [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Successfully updated port: 777e6f2e-8681-47b5-8ee6-025229e6edff {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 590.931119] env[62813]: DEBUG nova.compute.manager [req-a2ccae87-5569-4f27-a0f2-eb48b3274e56 req-6f5edbf0-3450-4444-b4a4-0164bd94221d service nova] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Received event network-vif-plugged-2f2bfdef-9856-4f14-b739-bc926edc8373 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 590.931119] env[62813]: DEBUG oslo_concurrency.lockutils [req-a2ccae87-5569-4f27-a0f2-eb48b3274e56 req-6f5edbf0-3450-4444-b4a4-0164bd94221d service nova] Acquiring lock "c56debdf-ab77-4151-bc20-6973ae594d87-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.931119] 
env[62813]: DEBUG oslo_concurrency.lockutils [req-a2ccae87-5569-4f27-a0f2-eb48b3274e56 req-6f5edbf0-3450-4444-b4a4-0164bd94221d service nova] Lock "c56debdf-ab77-4151-bc20-6973ae594d87-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.931119] env[62813]: DEBUG oslo_concurrency.lockutils [req-a2ccae87-5569-4f27-a0f2-eb48b3274e56 req-6f5edbf0-3450-4444-b4a4-0164bd94221d service nova] Lock "c56debdf-ab77-4151-bc20-6973ae594d87-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.932042] env[62813]: DEBUG nova.compute.manager [req-a2ccae87-5569-4f27-a0f2-eb48b3274e56 req-6f5edbf0-3450-4444-b4a4-0164bd94221d service nova] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] No waiting events found dispatching network-vif-plugged-2f2bfdef-9856-4f14-b739-bc926edc8373 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 590.932042] env[62813]: WARNING nova.compute.manager [req-a2ccae87-5569-4f27-a0f2-eb48b3274e56 req-6f5edbf0-3450-4444-b4a4-0164bd94221d service nova] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Received unexpected event network-vif-plugged-2f2bfdef-9856-4f14-b739-bc926edc8373 for instance with vm_state building and task_state spawning. [ 590.948281] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Acquiring lock "refresh_cache-4e911f12-5d9c-4713-b1e0-4d87a589a9d8" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.948437] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Acquired lock "refresh_cache-4e911f12-5d9c-4713-b1e0-4d87a589a9d8" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.948589] env[62813]: DEBUG nova.network.neutron [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 591.026433] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Acquiring lock "012bbc43-f61f-4aef-bd66-32fbe66f8374" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.026659] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Lock "012bbc43-f61f-4aef-bd66-32fbe66f8374" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.036100] env[62813]: DEBUG nova.network.neutron [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Successfully created port: d86cff95-b8b3-41b8-96b0-ec7de5dd008e {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.041800] env[62813]: DEBUG nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 591.136809] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.137401] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.138896] env[62813]: INFO nova.compute.claims [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 591.180222] env[62813]: DEBUG nova.network.neutron [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 591.192507] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.192507] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 591.192507] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.418306] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05439688-251d-4414-b0c3-2d0ff55d9afa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.428285] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14852878-c9c8-4421-bf1d-47e0ed995e4e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.462055] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf98c0a3-4bf9-4054-b075-0f3715bdf7e8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.470861] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e733a67e-a013-4505-a4e5-0a578e31a03c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.487720] env[62813]: DEBUG nova.compute.provider_tree [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.504127] env[62813]: DEBUG nova.scheduler.client.report [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 591.524755] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.388s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.525383] env[62813]: DEBUG nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 591.596567] env[62813]: DEBUG nova.compute.utils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 591.597929] env[62813]: DEBUG nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 591.598161] env[62813]: DEBUG nova.network.neutron [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 591.611549] env[62813]: DEBUG nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 591.724622] env[62813]: DEBUG nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 591.762564] env[62813]: DEBUG nova.virt.hardware [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 591.763847] env[62813]: DEBUG nova.virt.hardware [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 591.764389] env[62813]: DEBUG nova.virt.hardware [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 591.764478] env[62813]: DEBUG nova.virt.hardware [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 591.764565] env[62813]: DEBUG nova.virt.hardware [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 591.764743] env[62813]: DEBUG nova.virt.hardware [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 591.764935] env[62813]: DEBUG nova.virt.hardware [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 591.765171] env[62813]: DEBUG nova.virt.hardware [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 591.765299] env[62813]: DEBUG nova.virt.hardware [None 
req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 591.765463] env[62813]: DEBUG nova.virt.hardware [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 591.765863] env[62813]: DEBUG nova.virt.hardware [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 591.767579] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ddf723d-2978-4ca6-b7f5-2200623c60d1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.778941] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b066df9-01b8-4a87-ba2d-31cce1f491ed {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.208922] env[62813]: DEBUG nova.policy [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7cd09d6cf9c426bab1182db1518ae31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61912a8a410244e18aefc165315a6369', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 592.361955] env[62813]: DEBUG nova.network.neutron [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Updating instance_info_cache with network_info: [{"id": "777e6f2e-8681-47b5-8ee6-025229e6edff", "address": "fa:16:3e:37:d5:29", "network": {"id": "93afacab-6cae-4447-944f-158f62f01f06", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-696341656-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95166f4fd7764311b2469680d871786e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap777e6f2e-86", "ovs_interfaceid": 
"777e6f2e-8681-47b5-8ee6-025229e6edff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.383474] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Releasing lock "refresh_cache-4e911f12-5d9c-4713-b1e0-4d87a589a9d8" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.383474] env[62813]: DEBUG nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Instance network_info: |[{"id": "777e6f2e-8681-47b5-8ee6-025229e6edff", "address": "fa:16:3e:37:d5:29", "network": {"id": "93afacab-6cae-4447-944f-158f62f01f06", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-696341656-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95166f4fd7764311b2469680d871786e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap777e6f2e-86", "ovs_interfaceid": "777e6f2e-8681-47b5-8ee6-025229e6edff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 592.383920] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:d5:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b56036cd-97ac-47f5-9089-7b38bfe99228', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '777e6f2e-8681-47b5-8ee6-025229e6edff', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 592.391434] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Creating folder: Project (95166f4fd7764311b2469680d871786e). Parent ref: group-v840812. 
{{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 592.392123] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-909c52c9-43b6-47f4-90aa-77b1efcd5920 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.403441] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Created folder: Project (95166f4fd7764311b2469680d871786e) in parent group-v840812. [ 592.403587] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Creating folder: Instances. Parent ref: group-v840822. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 592.404340] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc7c75b9-6530-4105-b7a1-6346631e7eda {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.414153] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Created folder: Instances in parent group-v840822. [ 592.416410] env[62813]: DEBUG oslo.service.loopingcall [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 592.416410] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 592.416410] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99ce7199-2270-4a11-9f95-2bd31382ca5b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.436673] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 592.436673] env[62813]: value = "task-4267570" [ 592.436673] env[62813]: _type = "Task" [ 592.436673] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.446679] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267570, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.947311] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267570, 'name': CreateVM_Task, 'duration_secs': 0.422658} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.947311] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 592.951222] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.951222] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.951222] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 592.951222] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8da4778e-b096-4594-a21c-030a62f44c27 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.958023] env[62813]: DEBUG oslo_vmware.api [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Waiting for the task: (returnval){ [ 592.958023] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]528f2e74-678b-6348-4775-07d6c54aab96" [ 592.958023] env[62813]: _type = "Task" [ 592.958023] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.965309] env[62813]: DEBUG oslo_vmware.api [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]528f2e74-678b-6348-4775-07d6c54aab96, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.099874] env[62813]: DEBUG nova.network.neutron [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Successfully created port: eae723a8-45fe-42e1-aee1-e92e21c4880f {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 593.467520] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.467878] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 593.468011] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.559211] env[62813]: DEBUG nova.network.neutron [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Successfully created port: f7c2a635-3662-45d3-8be3-2b45bdbe6716 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 595.132640] env[62813]: DEBUG nova.network.neutron [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Successfully updated port: ef72a6d3-945e-47c8-b221-b21b479497d7 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 596.789070] env[62813]: DEBUG nova.network.neutron [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Successfully updated port: eae723a8-45fe-42e1-aee1-e92e21c4880f {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 596.808920] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Acquiring lock "refresh_cache-7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.808920] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Acquired lock 
"refresh_cache-7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.808920] env[62813]: DEBUG nova.network.neutron [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 596.954760] env[62813]: DEBUG nova.network.neutron [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 597.024031] env[62813]: DEBUG nova.compute.manager [req-af274191-e906-4595-906e-30d0ad937914 req-4a032054-3ea5-4485-9dfb-ed56055c0377 service nova] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Received event network-changed-1015057f-b5b7-41d3-be38-da52ad7703d2 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 597.024031] env[62813]: DEBUG nova.compute.manager [req-af274191-e906-4595-906e-30d0ad937914 req-4a032054-3ea5-4485-9dfb-ed56055c0377 service nova] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Refreshing instance network info cache due to event network-changed-1015057f-b5b7-41d3-be38-da52ad7703d2. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 597.024031] env[62813]: DEBUG oslo_concurrency.lockutils [req-af274191-e906-4595-906e-30d0ad937914 req-4a032054-3ea5-4485-9dfb-ed56055c0377 service nova] Acquiring lock "refresh_cache-6ff9b201-b6a2-45a9-bb6b-0acb8c82405f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.024031] env[62813]: DEBUG oslo_concurrency.lockutils [req-af274191-e906-4595-906e-30d0ad937914 req-4a032054-3ea5-4485-9dfb-ed56055c0377 service nova] Acquired lock "refresh_cache-6ff9b201-b6a2-45a9-bb6b-0acb8c82405f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.024031] env[62813]: DEBUG nova.network.neutron [req-af274191-e906-4595-906e-30d0ad937914 req-4a032054-3ea5-4485-9dfb-ed56055c0377 service nova] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Refreshing network info cache for port 1015057f-b5b7-41d3-be38-da52ad7703d2 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 597.954685] env[62813]: DEBUG nova.compute.manager [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Received event network-vif-plugged-6c5c1952-4bf4-4118-9407-f69a0173b959 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 597.954685] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Acquiring lock "022718e2-52ec-4130-81b0-fb39e57d6efe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.955990] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a 
req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Lock "022718e2-52ec-4130-81b0-fb39e57d6efe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.955990] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Lock "022718e2-52ec-4130-81b0-fb39e57d6efe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.955990] env[62813]: DEBUG nova.compute.manager [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] No waiting events found dispatching network-vif-plugged-6c5c1952-4bf4-4118-9407-f69a0173b959 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 597.956273] env[62813]: WARNING nova.compute.manager [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Received unexpected event network-vif-plugged-6c5c1952-4bf4-4118-9407-f69a0173b959 for instance with vm_state building and task_state spawning. [ 597.956602] env[62813]: DEBUG nova.compute.manager [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Received event network-changed-2f2bfdef-9856-4f14-b739-bc926edc8373 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 597.956923] env[62813]: DEBUG nova.compute.manager [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Refreshing instance network info cache due to event network-changed-2f2bfdef-9856-4f14-b739-bc926edc8373. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 597.957379] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Acquiring lock "refresh_cache-c56debdf-ab77-4151-bc20-6973ae594d87" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.959548] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Acquired lock "refresh_cache-c56debdf-ab77-4151-bc20-6973ae594d87" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.959548] env[62813]: DEBUG nova.network.neutron [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Refreshing network info cache for port 2f2bfdef-9856-4f14-b739-bc926edc8373 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 598.209024] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Acquiring lock "09aa702f-a28c-429b-83d9-378be8606a29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.209024] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Lock "09aa702f-a28c-429b-83d9-378be8606a29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.218086] env[62813]: DEBUG nova.network.neutron [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Updating instance_info_cache with network_info: [{"id": "eae723a8-45fe-42e1-aee1-e92e21c4880f", "address": "fa:16:3e:8a:75:4c", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeae723a8-45", "ovs_interfaceid": "eae723a8-45fe-42e1-aee1-e92e21c4880f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.224176] env[62813]: DEBUG nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 598.244303] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Releasing lock "refresh_cache-7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.245213] env[62813]: DEBUG nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Instance network_info: |[{"id": "eae723a8-45fe-42e1-aee1-e92e21c4880f", "address": "fa:16:3e:8a:75:4c", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeae723a8-45", "ovs_interfaceid": "eae723a8-45fe-42e1-aee1-e92e21c4880f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 598.245993] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:75:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eae723a8-45fe-42e1-aee1-e92e21c4880f', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 598.255577] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Creating folder: Project (cf1206472d7348938ff7fd4ac59296c0). Parent ref: group-v840812. 
{{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.256965] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4464b185-2621-49cc-ae94-686838dd43e6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.272809] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Created folder: Project (cf1206472d7348938ff7fd4ac59296c0) in parent group-v840812. [ 598.273422] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Creating folder: Instances. Parent ref: group-v840825. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.277032] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4bc4fa78-2dbc-4944-9fc7-99b0e3921cff {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.289723] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Created folder: Instances in parent group-v840825. [ 598.290011] env[62813]: DEBUG oslo.service.loopingcall [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 598.290224] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 598.296537] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fdc0810e-6c03-45da-847e-17b74083433e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.313378] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.313729] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.315800] env[62813]: INFO nova.compute.claims [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.327749] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 598.327749] env[62813]: value = "task-4267573" [ 598.327749] env[62813]: _type = "Task" [ 598.327749] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.336861] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267573, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.484544] env[62813]: DEBUG nova.network.neutron [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Successfully updated port: f7c2a635-3662-45d3-8be3-2b45bdbe6716 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 598.504599] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Acquiring lock "refresh_cache-012bbc43-f61f-4aef-bd66-32fbe66f8374" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.504898] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Acquired lock "refresh_cache-012bbc43-f61f-4aef-bd66-32fbe66f8374" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.505114] env[62813]: DEBUG nova.network.neutron [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 598.556440] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a7e20f-893f-4f25-a048-9f322ea37fca {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.569157] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9979b493-2953-40b0-a9f2-0cda1909cd4d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.607687] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27927234-e61f-4165-8ddc-80e876d8ed8a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.620961] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab355dd-0916-48a6-bd91-8e3f2cac6548 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.638464] env[62813]: DEBUG nova.compute.provider_tree [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 598.649796] env[62813]: DEBUG nova.scheduler.client.report [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 598.676419] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.357s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.676419] env[62813]: DEBUG nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 598.685642] env[62813]: DEBUG nova.network.neutron [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 598.747077] env[62813]: DEBUG nova.compute.utils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 598.749697] env[62813]: DEBUG nova.network.neutron [req-af274191-e906-4595-906e-30d0ad937914 req-4a032054-3ea5-4485-9dfb-ed56055c0377 service nova] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Updated VIF entry in instance network info cache for port 1015057f-b5b7-41d3-be38-da52ad7703d2. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 598.750074] env[62813]: DEBUG nova.network.neutron [req-af274191-e906-4595-906e-30d0ad937914 req-4a032054-3ea5-4485-9dfb-ed56055c0377 service nova] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Updating instance_info_cache with network_info: [{"id": "1015057f-b5b7-41d3-be38-da52ad7703d2", "address": "fa:16:3e:ed:98:fa", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.81", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1015057f-b5", "ovs_interfaceid": "1015057f-b5b7-41d3-be38-da52ad7703d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.753543] env[62813]: DEBUG nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Allocating IP information in the background. 
{{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 598.753857] env[62813]: DEBUG nova.network.neutron [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 598.757304] env[62813]: DEBUG nova.compute.manager [req-813733f6-6698-42fc-bd84-ca69278a37a5 req-3d9043b0-7afc-4827-b698-ef632011f72f service nova] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Received event network-vif-plugged-eae723a8-45fe-42e1-aee1-e92e21c4880f {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 598.757542] env[62813]: DEBUG oslo_concurrency.lockutils [req-813733f6-6698-42fc-bd84-ca69278a37a5 req-3d9043b0-7afc-4827-b698-ef632011f72f service nova] Acquiring lock "7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.757731] env[62813]: DEBUG oslo_concurrency.lockutils [req-813733f6-6698-42fc-bd84-ca69278a37a5 req-3d9043b0-7afc-4827-b698-ef632011f72f service nova] Lock "7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.758021] env[62813]: DEBUG oslo_concurrency.lockutils [req-813733f6-6698-42fc-bd84-ca69278a37a5 req-3d9043b0-7afc-4827-b698-ef632011f72f service nova] Lock "7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.758295] env[62813]: DEBUG nova.compute.manager [req-813733f6-6698-42fc-bd84-ca69278a37a5 req-3d9043b0-7afc-4827-b698-ef632011f72f service nova] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] No waiting events found dispatching network-vif-plugged-eae723a8-45fe-42e1-aee1-e92e21c4880f {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 598.758295] env[62813]: WARNING nova.compute.manager [req-813733f6-6698-42fc-bd84-ca69278a37a5 req-3d9043b0-7afc-4827-b698-ef632011f72f service nova] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Received unexpected event network-vif-plugged-eae723a8-45fe-42e1-aee1-e92e21c4880f for instance with vm_state building and task_state spawning. [ 598.764927] env[62813]: DEBUG oslo_concurrency.lockutils [req-af274191-e906-4595-906e-30d0ad937914 req-4a032054-3ea5-4485-9dfb-ed56055c0377 service nova] Releasing lock "refresh_cache-6ff9b201-b6a2-45a9-bb6b-0acb8c82405f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.768316] env[62813]: DEBUG nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Start building block device mappings for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 598.841961] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267573, 'name': CreateVM_Task} progress is 99%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.857038] env[62813]: DEBUG nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 598.886477] env[62813]: DEBUG nova.virt.hardware [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 598.886701] env[62813]: DEBUG nova.virt.hardware [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 598.886871] env[62813]: DEBUG nova.virt.hardware [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 598.887104] env[62813]: DEBUG nova.virt.hardware [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 598.887245] env[62813]: DEBUG nova.virt.hardware [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 598.888108] env[62813]: DEBUG nova.virt.hardware [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 598.888108] env[62813]: DEBUG nova.virt.hardware [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 
tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 598.888108] env[62813]: DEBUG nova.virt.hardware [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 598.888108] env[62813]: DEBUG nova.virt.hardware [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 598.888279] env[62813]: DEBUG nova.virt.hardware [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 598.888279] env[62813]: DEBUG nova.virt.hardware [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 598.889358] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb8c173-643a-4d51-bc99-93e1bced7464 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.899588] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246cc37a-e242-442e-87bd-d1420c73a806 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.945193] env[62813]: DEBUG nova.network.neutron [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Updated VIF entry in instance network info cache for port 2f2bfdef-9856-4f14-b739-bc926edc8373. 
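
The nova.virt.hardware lines above reduce a 1-vCPU m1.nano flavor with no flavor or image topology constraints to the single topology 1 socket x 1 core x 1 thread. A simplified re-derivation of that step (an illustrative sketch, not Nova's exact implementation):

    # Simplified sketch of the idea behind the "possible topologies" lines:
    # enumerate socket/core/thread factorizations of the vCPU count that fit
    # within the maxima (65536 each when nothing constrains them).
    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        found = []
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                found.append((sockets, cores, threads))
        return found

    # For vcpus=1 this yields [(1, 1, 1)], matching "Got 1 possible topologies".
    print(possible_topologies(1))
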
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 598.945522] env[62813]: DEBUG nova.network.neutron [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Updating instance_info_cache with network_info: [{"id": "2f2bfdef-9856-4f14-b739-bc926edc8373", "address": "fa:16:3e:58:61:5e", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f2bfdef-98", "ovs_interfaceid": "2f2bfdef-9856-4f14-b739-bc926edc8373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.957077] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Releasing lock "refresh_cache-c56debdf-ab77-4151-bc20-6973ae594d87" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.957428] env[62813]: DEBUG nova.compute.manager [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Received event network-changed-6c5c1952-4bf4-4118-9407-f69a0173b959 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 598.957516] env[62813]: DEBUG nova.compute.manager [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Refreshing instance network info cache due to event network-changed-6c5c1952-4bf4-4118-9407-f69a0173b959. 
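
The instance_info_cache payload logged above is a JSON list of VIF dictionaries. The short sketch below (with the literal abbreviated to a subset of the fields from that entry) shows where the pieces referenced later in this trace, the fixed IP, MTU and OVS interface id, sit inside that structure:

    # Sketch: navigating the network_info structure logged above; only a
    # subset of the original fields is reproduced here.
    import json

    network_info = json.loads("""
    [{"id": "2f2bfdef-9856-4f14-b739-bc926edc8373",
      "address": "fa:16:3e:58:61:5e",
      "network": {"id": "118ce385-2ab0-4678-988b-827442af7295",
                  "bridge": "br-int",
                  "subnets": [{"cidr": "192.168.233.0/24",
                               "ips": [{"address": "192.168.233.96",
                                        "type": "fixed"}]}],
                  "meta": {"mtu": 8950}},
      "type": "ovs",
      "devname": "tap2f2bfdef-98",
      "ovs_interfaceid": "2f2bfdef-9856-4f14-b739-bc926edc8373"}]
    """)

    vif = network_info[0]
    fixed_ip = vif["network"]["subnets"][0]["ips"][0]["address"]
    mtu = vif["network"]["meta"]["mtu"]
    print(vif["devname"], fixed_ip, mtu)  # tap2f2bfdef-98 192.168.233.96 8950
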
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 598.957737] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Acquiring lock "refresh_cache-022718e2-52ec-4130-81b0-fb39e57d6efe" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.961043] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Acquired lock "refresh_cache-022718e2-52ec-4130-81b0-fb39e57d6efe" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.961043] env[62813]: DEBUG nova.network.neutron [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Refreshing network info cache for port 6c5c1952-4bf4-4118-9407-f69a0173b959 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 599.171931] env[62813]: DEBUG nova.network.neutron [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Successfully updated port: 49eb0829-6b71-461f-b57e-c7c15ecbd523 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 599.347687] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267573, 'name': CreateVM_Task} progress is 99%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.350209] env[62813]: DEBUG nova.policy [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a12b6f20b5234b47b780a8226d6a6a2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6cf85b5a0bb04dd0a3d7e75b40622ec0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 599.605176] env[62813]: DEBUG nova.network.neutron [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Updated VIF entry in instance network info cache for port 6c5c1952-4bf4-4118-9407-f69a0173b959. 
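
The "Policy check for network:attach_external_network failed" entry above is a non-fatal oslo.policy probe: Nova checks whether the request credentials satisfy the (by default admin-only) rule and simply proceeds without the external-network flag when they do not. A hedged sketch of that check; the rule name and credential fields are taken from the log, while the enforcer setup is simplified to a single inline default:

    # Sketch of the oslo.policy check behind the "Policy check ... failed"
    # line. Real deployments build the enforcer from nova.conf and policy
    # files; here one rule is registered inline for illustration.
    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'is_admin:True'))

    creds = {'is_admin': False, 'roles': ['member', 'reader'],
             'project_id': '6cf85b5a0bb04dd0a3d7e75b40622ec0'}

    allowed = enforcer.authorize('network:attach_external_network',
                                 target={}, creds=creds, do_raise=False)
    print(allowed)  # False -> the port is created without the external flag
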
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 599.605538] env[62813]: DEBUG nova.network.neutron [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Updating instance_info_cache with network_info: [{"id": "6c5c1952-4bf4-4118-9407-f69a0173b959", "address": "fa:16:3e:e7:7b:43", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.75", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c5c1952-4b", "ovs_interfaceid": "6c5c1952-4bf4-4118-9407-f69a0173b959", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.621822] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Releasing lock "refresh_cache-022718e2-52ec-4130-81b0-fb39e57d6efe" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.622126] env[62813]: DEBUG nova.compute.manager [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Received event network-vif-plugged-777e6f2e-8681-47b5-8ee6-025229e6edff {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 599.622414] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Acquiring lock "4e911f12-5d9c-4713-b1e0-4d87a589a9d8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.622648] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Lock "4e911f12-5d9c-4713-b1e0-4d87a589a9d8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.622827] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Lock "4e911f12-5d9c-4713-b1e0-4d87a589a9d8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.623596] env[62813]: DEBUG nova.compute.manager 
[req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] No waiting events found dispatching network-vif-plugged-777e6f2e-8681-47b5-8ee6-025229e6edff {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 599.623596] env[62813]: WARNING nova.compute.manager [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Received unexpected event network-vif-plugged-777e6f2e-8681-47b5-8ee6-025229e6edff for instance with vm_state building and task_state spawning. [ 599.623596] env[62813]: DEBUG nova.compute.manager [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Received event network-changed-777e6f2e-8681-47b5-8ee6-025229e6edff {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 599.623928] env[62813]: DEBUG nova.compute.manager [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Refreshing instance network info cache due to event network-changed-777e6f2e-8681-47b5-8ee6-025229e6edff. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 599.624709] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Acquiring lock "refresh_cache-4e911f12-5d9c-4713-b1e0-4d87a589a9d8" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.625335] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Acquired lock "refresh_cache-4e911f12-5d9c-4713-b1e0-4d87a589a9d8" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.626078] env[62813]: DEBUG nova.network.neutron [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Refreshing network info cache for port 777e6f2e-8681-47b5-8ee6-025229e6edff {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 599.844723] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267573, 'name': CreateVM_Task, 'duration_secs': 1.401852} completed successfully. 
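
The CreateVM_Task entries above ("progress is 99%" ... "completed successfully" with duration_secs 1.401852) follow oslo.vmware's invoke-then-wait pattern: the driver calls a vSphere method that returns a task reference, then blocks on the session until vCenter reports the task done. A sketch of that pattern, assuming session is the already-established oslo_vmware VMwareAPISession from the start of this log and that folder_ref, config_spec and res_pool_ref were built elsewhere:

    # Sketch of the invoke-then-wait pattern behind the CreateVM_Task lines.
    # `session`, `folder_ref`, `config_spec` and `res_pool_ref` are assumed
    # to exist already.
    from oslo_vmware import exceptions as vexc

    def create_vm(session, folder_ref, config_spec, res_pool_ref):
        task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                      config=config_spec, pool=res_pool_ref)
        try:
            # Polls the task (the "progress is N%" lines) until it finishes
            # and returns the task info on success.
            task_info = session.wait_for_task(task_ref)
        except vexc.VimException:
            # vCenter reported the task as failed; let the caller handle it.
            raise
        return task_info.result  # managed-object reference of the new VM
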
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.844897] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 599.848231] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.848231] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.848231] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 599.848402] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6033eabf-5079-4804-a6b4-947513166fe3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.856442] env[62813]: DEBUG oslo_vmware.api [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Waiting for the task: (returnval){ [ 599.856442] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52945933-7a76-bcd1-152a-64e245e175e9" [ 599.856442] env[62813]: _type = "Task" [ 599.856442] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.871818] env[62813]: DEBUG oslo_vmware.api [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52945933-7a76-bcd1-152a-64e245e175e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.964621] env[62813]: DEBUG nova.network.neutron [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Updating instance_info_cache with network_info: [{"id": "f7c2a635-3662-45d3-8be3-2b45bdbe6716", "address": "fa:16:3e:d7:d9:9d", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7c2a635-36", "ovs_interfaceid": "f7c2a635-3662-45d3-8be3-2b45bdbe6716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.988232] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Releasing lock "refresh_cache-012bbc43-f61f-4aef-bd66-32fbe66f8374" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.989342] env[62813]: DEBUG nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Instance network_info: |[{"id": "f7c2a635-3662-45d3-8be3-2b45bdbe6716", "address": "fa:16:3e:d7:d9:9d", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7c2a635-36", "ovs_interfaceid": "f7c2a635-3662-45d3-8be3-2b45bdbe6716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 599.992461] env[62813]: DEBUG 
nova.virt.vmwareapi.vmops [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:d9:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7c2a635-3662-45d3-8be3-2b45bdbe6716', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 600.002579] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Creating folder: Project (61912a8a410244e18aefc165315a6369). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 600.003691] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ce7282b-1298-47c6-9865-6116122ed896 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.019204] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Created folder: Project (61912a8a410244e18aefc165315a6369) in parent group-v840812. [ 600.019204] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Creating folder: Instances. Parent ref: group-v840828. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 600.019204] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d55c67b-e8d7-4578-ac42-0894eb343a45 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.031968] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Created folder: Instances in parent group-v840828. [ 600.032347] env[62813]: DEBUG oslo.service.loopingcall [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 600.036967] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 600.036967] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91e41e3c-becb-4043-8601-87ffb930abff {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.063343] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 600.063343] env[62813]: value = "task-4267576" [ 600.063343] env[62813]: _type = "Task" [ 600.063343] env[62813]: } to complete. 
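
The oslo.service.loopingcall entry above shows the driver wrapping nova.virt.vmwareapi.vm_util.create_vm in one of that module's looping/retry helpers and waiting for it to return. As a generic illustration of the module (not necessarily the exact helper used on this code path), a FixedIntervalLoopingCall sketch with an assumed work_finished() placeholder:

    # Generic oslo.service looping-call usage; illustrative only.
    from oslo_service import loopingcall

    def work_finished():
        # Placeholder for "is the VM created yet?"-style state checking.
        return True

    def _poll():
        # One unit of work per tick; raising LoopingCallDone stops the loop
        # and hands the return value back to the waiter.
        if work_finished():
            raise loopingcall.LoopingCallDone(retvalue='done')

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    print(timer.start(interval=0.5).wait())  # 'done'
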
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.074336] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267576, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.368395] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.368694] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.368888] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.578636] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267576, 'name': CreateVM_Task, 'duration_secs': 0.362023} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.579891] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 600.581028] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.581028] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.581324] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 600.581647] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ec109a6-286d-4120-ae3b-040903130dd7 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.592535] env[62813]: DEBUG oslo_vmware.api [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Waiting for the task: (returnval){ [ 600.592535] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52a17d01-6295-5da6-16b0-ad7bea1a45ae" [ 600.592535] env[62813]: _type = "Task" [ 600.592535] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.605859] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.605859] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.605859] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.776941] env[62813]: DEBUG nova.network.neutron [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Updated VIF entry in instance network info cache for port 777e6f2e-8681-47b5-8ee6-025229e6edff. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 600.777917] env[62813]: DEBUG nova.network.neutron [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Updating instance_info_cache with network_info: [{"id": "777e6f2e-8681-47b5-8ee6-025229e6edff", "address": "fa:16:3e:37:d5:29", "network": {"id": "93afacab-6cae-4447-944f-158f62f01f06", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-696341656-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95166f4fd7764311b2469680d871786e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap777e6f2e-86", "ovs_interfaceid": "777e6f2e-8681-47b5-8ee6-025229e6edff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.802833] env[62813]: DEBUG oslo_concurrency.lockutils [req-1dacd5cf-0c6f-4ab3-ac97-ecbcb9e7ff1a req-66a35bf4-2067-428f-b022-e35fdb80827a service nova] Releasing lock "refresh_cache-4e911f12-5d9c-4713-b1e0-4d87a589a9d8" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.970814] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "9a448d2b-0dee-4a90-b131-e6ada542f342" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.974400] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "9a448d2b-0dee-4a90-b131-e6ada542f342" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.996447] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 601.020537] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "aa76585b-55a8-437c-8dea-7731d85a3b82" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.020826] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "aa76585b-55a8-437c-8dea-7731d85a3b82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.044888] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 601.108907] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.109224] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.111045] env[62813]: INFO nova.compute.claims [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 601.124290] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.349642] env[62813]: DEBUG nova.network.neutron [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Successfully created port: d8206f05-0196-48f9-9338-d0c5e01f3e69 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 601.451789] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c0f140-20f0-4e53-bfc9-87de17e63838 {{(pid=62813) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.464535] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca1abf1-41e8-46ea-b350-4c23e4a1b066 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.501245] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50bcb05-a182-413e-8285-28d36246ddbe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.510309] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0049f7bf-2196-413f-b569-0a0823246c6c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.527222] env[62813]: DEBUG nova.compute.provider_tree [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.543138] env[62813]: DEBUG nova.scheduler.client.report [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 601.559877] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.451s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.560483] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Start building networks asynchronously for instance. 
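
The scheduler report-client entry above logs the resource-provider inventory verbatim. Placement treats the schedulable capacity of each resource class as (total - reserved) * allocation_ratio, which is the headroom the instance claims in this trace are checked against. A small sketch computing that from the logged values:

    # Effective capacity per resource class for the inventory logged above,
    # using placement's (total - reserved) * allocation_ratio rule.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 405,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 405.0 -- plenty of room for the
    # 1 VCPU / 128 MB / 1 GB m1.nano claims made in this run.
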
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 601.564221] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.440s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.564473] env[62813]: INFO nova.compute.claims [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 601.606737] env[62813]: DEBUG nova.compute.utils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.607551] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 601.607724] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 601.620731] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 601.733359] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 601.772662] env[62813]: DEBUG nova.policy [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a75d104817c465086b9865dd247b6ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7183133e08fb4df887a305c97c13fd77', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 601.790583] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 601.790825] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 601.790982] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 601.792593] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 601.792874] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 601.792963] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 601.793166] env[62813]: DEBUG nova.virt.hardware 
[None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 601.793399] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 601.793639] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 601.793716] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 601.793878] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 601.795116] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9682203-7e5e-43b9-ad43-e774745d0635 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.805757] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06640eae-c7a4-42f4-87c9-16a62d228722 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.832022] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Acquiring lock "4357ed8f-14f7-4d44-8d1b-262ccaafe1e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.832022] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Lock "4357ed8f-14f7-4d44-8d1b-262ccaafe1e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.909674] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc0e983-a225-4dde-bdfa-d020bfe69c8b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.919776] env[62813]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1599bed-faa3-4b9b-a0d4-b8640fc8040c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.953468] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0c29e5-7119-450d-8cfb-122763741a81 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.964029] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7528ed-db89-4fab-bd29-ef9bf0fe8456 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.984367] env[62813]: DEBUG nova.compute.provider_tree [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.997738] env[62813]: DEBUG nova.scheduler.client.report [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 602.016238] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.453s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.016766] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 602.080890] env[62813]: DEBUG nova.compute.utils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 602.081864] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Allocating IP information in the background. 
{{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 602.082232] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 602.100288] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 602.206843] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 602.248542] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 602.248802] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 602.248976] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.249193] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 602.249349] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.249497] env[62813]: DEBUG nova.virt.hardware 
[None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 602.249714] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 602.249875] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 602.250057] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 602.250257] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 602.250423] env[62813]: DEBUG nova.virt.hardware [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 602.251638] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e90dca-3a33-4e13-8eaf-dec4bdec15ff {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.262932] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c71238-9cb8-476a-ad5a-e7db54203c8c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.370859] env[62813]: DEBUG nova.policy [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a75d104817c465086b9865dd247b6ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7183133e08fb4df887a305c97c13fd77', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 602.852083] env[62813]: DEBUG nova.network.neutron [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 
tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Successfully updated port: d86cff95-b8b3-41b8-96b0-ec7de5dd008e {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 602.865402] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "refresh_cache-65d620ad-0863-4947-945e-0e4b3c01d3a3" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.865914] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquired lock "refresh_cache-65d620ad-0863-4947-945e-0e4b3c01d3a3" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.865914] env[62813]: DEBUG nova.network.neutron [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 603.045275] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Successfully created port: ccd65f56-38a6-4d10-8583-cf86b146c2df {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.290192] env[62813]: DEBUG nova.network.neutron [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 603.473303] env[62813]: DEBUG nova.compute.manager [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Received event network-vif-plugged-ef72a6d3-945e-47c8-b221-b21b479497d7 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 603.473772] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Acquiring lock "65d620ad-0863-4947-945e-0e4b3c01d3a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.474182] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Lock "65d620ad-0863-4947-945e-0e4b3c01d3a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.474501] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Lock "65d620ad-0863-4947-945e-0e4b3c01d3a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.474776] env[62813]: DEBUG nova.compute.manager [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] No waiting events found dispatching network-vif-plugged-ef72a6d3-945e-47c8-b221-b21b479497d7 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 603.475107] env[62813]: WARNING nova.compute.manager [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Received unexpected event network-vif-plugged-ef72a6d3-945e-47c8-b221-b21b479497d7 for instance with vm_state building and task_state spawning. [ 603.475413] env[62813]: DEBUG nova.compute.manager [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Received event network-changed-ef72a6d3-945e-47c8-b221-b21b479497d7 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 603.476676] env[62813]: DEBUG nova.compute.manager [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Refreshing instance network info cache due to event network-changed-ef72a6d3-945e-47c8-b221-b21b479497d7. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 603.476676] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Acquiring lock "refresh_cache-65d620ad-0863-4947-945e-0e4b3c01d3a3" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.900587] env[62813]: DEBUG nova.compute.manager [req-debf0642-edd5-4ff0-a28a-b79571019b44 req-af1fb096-989a-435d-91e3-b599d97f0b83 service nova] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Received event network-vif-plugged-f7c2a635-3662-45d3-8be3-2b45bdbe6716 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 603.900587] env[62813]: DEBUG oslo_concurrency.lockutils [req-debf0642-edd5-4ff0-a28a-b79571019b44 req-af1fb096-989a-435d-91e3-b599d97f0b83 service nova] Acquiring lock "012bbc43-f61f-4aef-bd66-32fbe66f8374-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.900587] env[62813]: DEBUG oslo_concurrency.lockutils [req-debf0642-edd5-4ff0-a28a-b79571019b44 req-af1fb096-989a-435d-91e3-b599d97f0b83 service nova] Lock "012bbc43-f61f-4aef-bd66-32fbe66f8374-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.900587] env[62813]: DEBUG oslo_concurrency.lockutils [req-debf0642-edd5-4ff0-a28a-b79571019b44 req-af1fb096-989a-435d-91e3-b599d97f0b83 service nova] Lock "012bbc43-f61f-4aef-bd66-32fbe66f8374-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.900775] env[62813]: DEBUG nova.compute.manager [req-debf0642-edd5-4ff0-a28a-b79571019b44 req-af1fb096-989a-435d-91e3-b599d97f0b83 service nova] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] No waiting events found dispatching network-vif-plugged-f7c2a635-3662-45d3-8be3-2b45bdbe6716 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 603.900884] env[62813]: WARNING nova.compute.manager [req-debf0642-edd5-4ff0-a28a-b79571019b44 req-af1fb096-989a-435d-91e3-b599d97f0b83 service nova] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Received unexpected event network-vif-plugged-f7c2a635-3662-45d3-8be3-2b45bdbe6716 for instance with vm_state building and task_state spawning. 
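The two event bursts above (network-vif-plugged-ef72a6d3-... for instance 65d620ad-... and network-vif-plugged-f7c2a635-... for instance 012bbc43-...) follow the same dispatch shape: take the per-instance "<uuid>-events" lock, look for a waiter registered for that event, and, finding none while the instance is still building/spawning, emit the "Received unexpected event" warning. The sketch below reproduces that pattern in a self-contained form; the class and method names are illustrative stand-ins, not Nova's actual implementation.

# Illustrative sketch of the "pop waiting event or warn" dispatch pattern seen
# in the records above. Names are hypothetical; this is not Nova's code.
import logging
import threading

LOG = logging.getLogger(__name__)


class InstanceEventWaiters:
    """Tracks events (e.g. 'network-vif-plugged-<port-id>') a caller is waiting on."""

    def __init__(self):
        self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
        self._waiters = {}              # {instance_uuid: {event_name: threading.Event}}

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event and return something to wait on."""
        waiter = threading.Event()
        with self._lock:
            self._waiters.setdefault(instance_uuid, {})[event_name] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name, vm_state, task_state):
        """Deliver an external event; warn if nobody was waiting for it."""
        with self._lock:
            waiter = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if waiter is None:
            LOG.warning(
                "Received unexpected event %s for instance %s with vm_state %s "
                "and task_state %s.", event_name, instance_uuid, vm_state, task_state)
            return
        waiter.set()


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    waiters = InstanceEventWaiters()
    # Nobody registered for this port's plug event yet, so it is "unexpected",
    # matching the WARNING records above.
    waiters.dispatch("65d620ad-0863-4947-945e-0e4b3c01d3a3",
                     "network-vif-plugged-ef72a6d3-945e-47c8-b221-b21b479497d7",
                     vm_state="building", task_state="spawning")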
[ 604.659246] env[62813]: DEBUG nova.network.neutron [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Successfully updated port: d8206f05-0196-48f9-9338-d0c5e01f3e69 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 604.674028] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Acquiring lock "refresh_cache-09aa702f-a28c-429b-83d9-378be8606a29" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.674028] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Acquired lock "refresh_cache-09aa702f-a28c-429b-83d9-378be8606a29" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.674028] env[62813]: DEBUG nova.network.neutron [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 604.811272] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Successfully created port: 3868ae93-45be-4658-ae16-01769db46a9d {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.837903] env[62813]: DEBUG nova.network.neutron [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 605.146359] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Successfully updated port: ccd65f56-38a6-4d10-8583-cf86b146c2df {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 605.168490] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "refresh_cache-9a448d2b-0dee-4a90-b131-e6ada542f342" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.171036] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquired lock "refresh_cache-9a448d2b-0dee-4a90-b131-e6ada542f342" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.171036] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 605.313205] env[62813]: DEBUG nova.network.neutron [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Updating instance_info_cache with network_info: [{"id": "ef72a6d3-945e-47c8-b221-b21b479497d7", "address": "fa:16:3e:44:58:56", "network": {"id": "a5cc7ee6-670a-4757-8a08-b4e900ba29d0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1027444802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef72a6d3-94", "ovs_interfaceid": "ef72a6d3-945e-47c8-b221-b21b479497d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "49eb0829-6b71-461f-b57e-c7c15ecbd523", "address": "fa:16:3e:4f:0a:fe", "network": {"id": "d31ea7bf-62c7-4893-bdca-0e30cc7eeeb4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-203156267", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a766368-f5a3-472c-af56-9cfca63012ae", "external-id": "nsx-vlan-transportzone-518", "segmentation_id": 518, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49eb0829-6b", "ovs_interfaceid": "49eb0829-6b71-461f-b57e-c7c15ecbd523", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d86cff95-b8b3-41b8-96b0-ec7de5dd008e", "address": "fa:16:3e:2d:7c:ac", "network": {"id": "a5cc7ee6-670a-4757-8a08-b4e900ba29d0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1027444802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd86cff95-b8", "ovs_interfaceid": "d86cff95-b8b3-41b8-96b0-ec7de5dd008e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.317665] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 605.339157] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Releasing lock "refresh_cache-65d620ad-0863-4947-945e-0e4b3c01d3a3" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.339579] env[62813]: DEBUG nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Instance network_info: |[{"id": "ef72a6d3-945e-47c8-b221-b21b479497d7", "address": "fa:16:3e:44:58:56", "network": {"id": "a5cc7ee6-670a-4757-8a08-b4e900ba29d0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1027444802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef72a6d3-94", "ovs_interfaceid": "ef72a6d3-945e-47c8-b221-b21b479497d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "49eb0829-6b71-461f-b57e-c7c15ecbd523", "address": "fa:16:3e:4f:0a:fe", "network": {"id": "d31ea7bf-62c7-4893-bdca-0e30cc7eeeb4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-203156267", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a766368-f5a3-472c-af56-9cfca63012ae", "external-id": "nsx-vlan-transportzone-518", "segmentation_id": 518, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49eb0829-6b", "ovs_interfaceid": "49eb0829-6b71-461f-b57e-c7c15ecbd523", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d86cff95-b8b3-41b8-96b0-ec7de5dd008e", "address": "fa:16:3e:2d:7c:ac", "network": {"id": "a5cc7ee6-670a-4757-8a08-b4e900ba29d0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1027444802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd86cff95-b8", "ovs_interfaceid": "d86cff95-b8b3-41b8-96b0-ec7de5dd008e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 605.343458] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Acquired lock "refresh_cache-65d620ad-0863-4947-945e-0e4b3c01d3a3" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.344218] env[62813]: DEBUG nova.network.neutron [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Refreshing network info cache for port ef72a6d3-945e-47c8-b221-b21b479497d7 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 605.347256] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:58:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbd7899c-c96e-47fc-9141-5803b646917a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef72a6d3-945e-47c8-b221-b21b479497d7', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:0a:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2a766368-f5a3-472c-af56-9cfca63012ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '49eb0829-6b71-461f-b57e-c7c15ecbd523', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:7c:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbd7899c-c96e-47fc-9141-5803b646917a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd86cff95-b8b3-41b8-96b0-ec7de5dd008e', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 605.367481] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Creating folder: Project (09c8790d652a464fb2622aec202522ea). Parent ref: group-v840812. 
{{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 605.371442] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e33ca68e-9432-45ed-9645-9fcd805abbc0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.386209] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Created folder: Project (09c8790d652a464fb2622aec202522ea) in parent group-v840812. [ 605.386551] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Creating folder: Instances. Parent ref: group-v840831. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 605.387938] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ff62555-9fbf-4ecc-a91c-123b3f002463 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.400048] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Created folder: Instances in parent group-v840831. [ 605.400323] env[62813]: DEBUG oslo.service.loopingcall [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.400532] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 605.400743] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e4e3d0a0-1c84-4a4e-bc32-7627b39329b9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.431821] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 605.431821] env[62813]: value = "task-4267579" [ 605.431821] env[62813]: _type = "Task" [ 605.431821] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.441742] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267579, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.581246] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Acquiring lock "76b0e03d-9636-4328-bfd5-17c434cfae72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.581887] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Lock "76b0e03d-9636-4328-bfd5-17c434cfae72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.692754] env[62813]: DEBUG nova.network.neutron [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Updating instance_info_cache with network_info: [{"id": "d8206f05-0196-48f9-9338-d0c5e01f3e69", "address": "fa:16:3e:6f:73:48", "network": {"id": "39e6ef5c-b951-48c3-ac9c-3efbb9b7349c", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1616664542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6cf85b5a0bb04dd0a3d7e75b40622ec0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8206f05-01", "ovs_interfaceid": "d8206f05-0196-48f9-9338-d0c5e01f3e69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.720198] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Releasing lock "refresh_cache-09aa702f-a28c-429b-83d9-378be8606a29" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.720198] env[62813]: DEBUG nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Instance network_info: |[{"id": "d8206f05-0196-48f9-9338-d0c5e01f3e69", "address": "fa:16:3e:6f:73:48", "network": {"id": "39e6ef5c-b951-48c3-ac9c-3efbb9b7349c", "bridge": "br-int", "label": 
"tempest-AttachInterfacesUnderV243Test-1616664542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6cf85b5a0bb04dd0a3d7e75b40622ec0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8206f05-01", "ovs_interfaceid": "d8206f05-0196-48f9-9338-d0c5e01f3e69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 605.720700] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:73:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad4c015b-4a8b-46ca-9556-74bad8db9fb3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd8206f05-0196-48f9-9338-d0c5e01f3e69', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 605.731673] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Creating folder: Project (6cf85b5a0bb04dd0a3d7e75b40622ec0). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 605.732459] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-697f2ede-dceb-4d87-8c96-042b55cbfa4a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.745969] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Created folder: Project (6cf85b5a0bb04dd0a3d7e75b40622ec0) in parent group-v840812. [ 605.746240] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Creating folder: Instances. Parent ref: group-v840834. 
{{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 605.746496] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12538d61-8050-4058-8cef-6219b047c74c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.758787] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Created folder: Instances in parent group-v840834. [ 605.758918] env[62813]: DEBUG oslo.service.loopingcall [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.759160] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 605.759419] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-26f0c319-f2ee-4b2c-85d1-c23a975aa905 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.784579] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 605.784579] env[62813]: value = "task-4267582" [ 605.784579] env[62813]: _type = "Task" [ 605.784579] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.796274] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267582, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.942667] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267579, 'name': CreateVM_Task, 'duration_secs': 0.481648} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.942667] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 605.943583] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.943676] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.944104] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 605.944303] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55be401c-5731-4326-8ef3-23c4b8da3f25 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.947817] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Updating instance_info_cache with network_info: [{"id": "ccd65f56-38a6-4d10-8583-cf86b146c2df", "address": "fa:16:3e:c9:7e:1a", "network": {"id": "fe4d8d65-f572-4301-8dd9-68a231f2fa3e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-257476310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7183133e08fb4df887a305c97c13fd77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccd65f56-38", "ovs_interfaceid": "ccd65f56-38a6-4d10-8583-cf86b146c2df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.950424] env[62813]: DEBUG oslo_vmware.api [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 
tempest-ServersTestMultiNic-1297526951-project-member] Waiting for the task: (returnval){ [ 605.950424] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]522883d5-2823-79dc-f6e6-bf97332689d6" [ 605.950424] env[62813]: _type = "Task" [ 605.950424] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.960493] env[62813]: DEBUG oslo_vmware.api [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]522883d5-2823-79dc-f6e6-bf97332689d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.965813] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Releasing lock "refresh_cache-9a448d2b-0dee-4a90-b131-e6ada542f342" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.965813] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Instance network_info: |[{"id": "ccd65f56-38a6-4d10-8583-cf86b146c2df", "address": "fa:16:3e:c9:7e:1a", "network": {"id": "fe4d8d65-f572-4301-8dd9-68a231f2fa3e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-257476310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7183133e08fb4df887a305c97c13fd77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccd65f56-38", "ovs_interfaceid": "ccd65f56-38a6-4d10-8583-cf86b146c2df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 605.965813] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:7e:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35e463c7-7d78-4d66-8efd-6127b1f3ee17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ccd65f56-38a6-4d10-8583-cf86b146c2df', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 605.974034] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 
tempest-MultipleCreateTestJSON-1757949274-project-member] Creating folder: Project (7183133e08fb4df887a305c97c13fd77). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 605.974548] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ba3ce6e-c0a9-4014-8921-2651f0909061 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.988706] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Created folder: Project (7183133e08fb4df887a305c97c13fd77) in parent group-v840812. [ 605.988979] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Creating folder: Instances. Parent ref: group-v840837. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 605.989928] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce4726d4-c47b-4510-9429-bc47ccc73505 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.001190] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Created folder: Instances in parent group-v840837. [ 606.001448] env[62813]: DEBUG oslo.service.loopingcall [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 606.001648] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 606.001861] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83ecdde3-aa6f-41a8-b525-ec69a65a7ddb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.026312] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 606.026312] env[62813]: value = "task-4267585" [ 606.026312] env[62813]: _type = "Task" [ 606.026312] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.036511] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267585, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.136769] env[62813]: DEBUG nova.network.neutron [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Updated VIF entry in instance network info cache for port ef72a6d3-945e-47c8-b221-b21b479497d7. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 606.137153] env[62813]: DEBUG nova.network.neutron [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Updating instance_info_cache with network_info: [{"id": "ef72a6d3-945e-47c8-b221-b21b479497d7", "address": "fa:16:3e:44:58:56", "network": {"id": "a5cc7ee6-670a-4757-8a08-b4e900ba29d0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1027444802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef72a6d3-94", "ovs_interfaceid": "ef72a6d3-945e-47c8-b221-b21b479497d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "49eb0829-6b71-461f-b57e-c7c15ecbd523", "address": "fa:16:3e:4f:0a:fe", "network": {"id": "d31ea7bf-62c7-4893-bdca-0e30cc7eeeb4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-203156267", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a766368-f5a3-472c-af56-9cfca63012ae", "external-id": "nsx-vlan-transportzone-518", "segmentation_id": 518, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49eb0829-6b", "ovs_interfaceid": "49eb0829-6b71-461f-b57e-c7c15ecbd523", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d86cff95-b8b3-41b8-96b0-ec7de5dd008e", "address": "fa:16:3e:2d:7c:ac", "network": {"id": "a5cc7ee6-670a-4757-8a08-b4e900ba29d0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1027444802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", 
"segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd86cff95-b8", "ovs_interfaceid": "d86cff95-b8b3-41b8-96b0-ec7de5dd008e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.158435] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Releasing lock "refresh_cache-65d620ad-0863-4947-945e-0e4b3c01d3a3" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.158743] env[62813]: DEBUG nova.compute.manager [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Received event network-changed-eae723a8-45fe-42e1-aee1-e92e21c4880f {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 606.158947] env[62813]: DEBUG nova.compute.manager [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Refreshing instance network info cache due to event network-changed-eae723a8-45fe-42e1-aee1-e92e21c4880f. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 606.159562] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Acquiring lock "refresh_cache-7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.159562] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Acquired lock "refresh_cache-7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.160922] env[62813]: DEBUG nova.network.neutron [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Refreshing network info cache for port eae723a8-45fe-42e1-aee1-e92e21c4880f {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 606.299348] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267582, 'name': CreateVM_Task, 'duration_secs': 0.399021} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.299835] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 606.301068] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.469499] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.469979] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 606.469979] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.470786] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.470786] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 606.470946] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6b72ddc-ff9d-4284-8cab-48ad23e3985d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.479903] env[62813]: DEBUG oslo_vmware.api [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Waiting for the task: (returnval){ [ 606.479903] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52cd9259-b8d8-42e2-8205-f743b50f9dd8" [ 606.479903] env[62813]: _type = "Task" [ 606.479903] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.494297] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.494686] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 606.494982] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.536766] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267585, 'name': CreateVM_Task, 'duration_secs': 0.395462} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.537144] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 606.537701] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.537909] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.538228] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 606.538468] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78fe4a77-dc8f-462b-8dac-67a213e1cfa4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.544326] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] 
Waiting for the task: (returnval){ [ 606.544326] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]521f54b6-1a1e-35bd-ed2b-68214e328d5b" [ 606.544326] env[62813]: _type = "Task" [ 606.544326] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.557613] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]521f54b6-1a1e-35bd-ed2b-68214e328d5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.792018] env[62813]: DEBUG nova.compute.manager [req-a2396428-f9fb-475f-83cc-45a10a097de8 req-5b824908-e158-4c0a-ab76-a64dc0d05e84 service nova] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Received event network-vif-plugged-ccd65f56-38a6-4d10-8583-cf86b146c2df {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 606.792302] env[62813]: DEBUG oslo_concurrency.lockutils [req-a2396428-f9fb-475f-83cc-45a10a097de8 req-5b824908-e158-4c0a-ab76-a64dc0d05e84 service nova] Acquiring lock "9a448d2b-0dee-4a90-b131-e6ada542f342-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.792537] env[62813]: DEBUG oslo_concurrency.lockutils [req-a2396428-f9fb-475f-83cc-45a10a097de8 req-5b824908-e158-4c0a-ab76-a64dc0d05e84 service nova] Lock "9a448d2b-0dee-4a90-b131-e6ada542f342-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.792760] env[62813]: DEBUG oslo_concurrency.lockutils [req-a2396428-f9fb-475f-83cc-45a10a097de8 req-5b824908-e158-4c0a-ab76-a64dc0d05e84 service nova] Lock "9a448d2b-0dee-4a90-b131-e6ada542f342-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.792871] env[62813]: DEBUG nova.compute.manager [req-a2396428-f9fb-475f-83cc-45a10a097de8 req-5b824908-e158-4c0a-ab76-a64dc0d05e84 service nova] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] No waiting events found dispatching network-vif-plugged-ccd65f56-38a6-4d10-8583-cf86b146c2df {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 606.799091] env[62813]: WARNING nova.compute.manager [req-a2396428-f9fb-475f-83cc-45a10a097de8 req-5b824908-e158-4c0a-ab76-a64dc0d05e84 service nova] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Received unexpected event network-vif-plugged-ccd65f56-38a6-4d10-8583-cf86b146c2df for instance with vm_state building and task_state spawning. 
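The repeated Acquiring/Acquired/Releasing lock "[datastore2] devstack-image-cache_base/<image-id>" entries above come from oslo.concurrency's lockutils: each spawn serializes work on the shared image-cache entry under a lock named after the datastore and image id, so concurrent builds from the same image do not race while the cached VMDK is located or fetched. Below is a minimal sketch of that pattern only; process_cached_image() is an illustrative helper, not Nova's actual _fetch_image_if_missing code.

from oslo_concurrency import lockutils

def process_cached_image(datastore, image_id):
    # Lock name mirrors the ones in the log:
    # "[datastore2] devstack-image-cache_base/<image-id>"
    lock_name = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
    # lockutils.lock() is a context manager; the "Acquiring"/"Acquired"/
    # "Releasing" lines above correspond to entering and leaving this block.
    with lockutils.lock(lock_name):
        # Check whether the cached VMDK already exists on the datastore and
        # fetch it from the image service if missing (elided in this sketch).
        pass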
[ 607.058241] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.058548] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 607.058765] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.222270] env[62813]: DEBUG nova.compute.manager [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Received event network-vif-plugged-d86cff95-b8b3-41b8-96b0-ec7de5dd008e {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 607.222527] env[62813]: DEBUG oslo_concurrency.lockutils [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] Acquiring lock "65d620ad-0863-4947-945e-0e4b3c01d3a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.222784] env[62813]: DEBUG oslo_concurrency.lockutils [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] Lock "65d620ad-0863-4947-945e-0e4b3c01d3a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.222997] env[62813]: DEBUG oslo_concurrency.lockutils [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] Lock "65d620ad-0863-4947-945e-0e4b3c01d3a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.223283] env[62813]: DEBUG nova.compute.manager [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] No waiting events found dispatching network-vif-plugged-d86cff95-b8b3-41b8-96b0-ec7de5dd008e {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 607.223519] env[62813]: WARNING nova.compute.manager [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Received unexpected event network-vif-plugged-d86cff95-b8b3-41b8-96b0-ec7de5dd008e for instance with vm_state building and task_state spawning. 
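The "<instance-uuid>-events" lock lines and the WARNING "Received unexpected event network-vif-plugged-..." entries above reflect Nova's external instance event handling: Neutron notifies the compute service that a VIF has been plugged, the manager pops any registered waiter for that (instance, event) pair under a per-instance lock, and if nothing is waiting yet (the instance is still building) the event is logged as unexpected and dropped. The following is a simplified, hypothetical stand-in for that bookkeeping, not Nova's InstanceEvents implementation.

import logging
import threading

LOG = logging.getLogger(__name__)

class InstanceEventTable:
    """Per-instance table of external events a spawn is prepared to wait for."""

    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        # Called by the spawning thread before it expects the event.
        with self._lock:
            waiter = threading.Event()
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name):
        # Called when an external event such as network-vif-plugged-<port> arrives.
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            # Nothing registered yet -- the "Received unexpected event"
            # warnings in the log correspond to this branch.
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
        else:
            waiter.set()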
[ 607.223699] env[62813]: DEBUG nova.compute.manager [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Received event network-changed-d86cff95-b8b3-41b8-96b0-ec7de5dd008e {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 607.223904] env[62813]: DEBUG nova.compute.manager [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Refreshing instance network info cache due to event network-changed-d86cff95-b8b3-41b8-96b0-ec7de5dd008e. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 607.224176] env[62813]: DEBUG oslo_concurrency.lockutils [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] Acquiring lock "refresh_cache-65d620ad-0863-4947-945e-0e4b3c01d3a3" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.224335] env[62813]: DEBUG oslo_concurrency.lockutils [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] Acquired lock "refresh_cache-65d620ad-0863-4947-945e-0e4b3c01d3a3" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.224499] env[62813]: DEBUG nova.network.neutron [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Refreshing network info cache for port d86cff95-b8b3-41b8-96b0-ec7de5dd008e {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 607.295532] env[62813]: DEBUG nova.network.neutron [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Updated VIF entry in instance network info cache for port eae723a8-45fe-42e1-aee1-e92e21c4880f. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 607.295719] env[62813]: DEBUG nova.network.neutron [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Updating instance_info_cache with network_info: [{"id": "eae723a8-45fe-42e1-aee1-e92e21c4880f", "address": "fa:16:3e:8a:75:4c", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeae723a8-45", "ovs_interfaceid": "eae723a8-45fe-42e1-aee1-e92e21c4880f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.312824] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Releasing lock "refresh_cache-7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.313115] env[62813]: DEBUG nova.compute.manager [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Received event network-vif-plugged-49eb0829-6b71-461f-b57e-c7c15ecbd523 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 607.313295] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Acquiring lock "65d620ad-0863-4947-945e-0e4b3c01d3a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.313775] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Lock "65d620ad-0863-4947-945e-0e4b3c01d3a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.313775] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Lock "65d620ad-0863-4947-945e-0e4b3c01d3a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.313894] env[62813]: DEBUG nova.compute.manager 
[req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] No waiting events found dispatching network-vif-plugged-49eb0829-6b71-461f-b57e-c7c15ecbd523 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 607.314153] env[62813]: WARNING nova.compute.manager [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Received unexpected event network-vif-plugged-49eb0829-6b71-461f-b57e-c7c15ecbd523 for instance with vm_state building and task_state spawning. [ 607.314359] env[62813]: DEBUG nova.compute.manager [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Received event network-changed-f7c2a635-3662-45d3-8be3-2b45bdbe6716 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 607.314523] env[62813]: DEBUG nova.compute.manager [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Refreshing instance network info cache due to event network-changed-f7c2a635-3662-45d3-8be3-2b45bdbe6716. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 607.314714] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Acquiring lock "refresh_cache-012bbc43-f61f-4aef-bd66-32fbe66f8374" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.314850] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Acquired lock "refresh_cache-012bbc43-f61f-4aef-bd66-32fbe66f8374" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.315019] env[62813]: DEBUG nova.network.neutron [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Refreshing network info cache for port f7c2a635-3662-45d3-8be3-2b45bdbe6716 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 607.583293] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Successfully updated port: 3868ae93-45be-4658-ae16-01769db46a9d {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 607.602734] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "refresh_cache-aa76585b-55a8-437c-8dea-7731d85a3b82" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.602937] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquired lock "refresh_cache-aa76585b-55a8-437c-8dea-7731d85a3b82" {{(pid=62813) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.603111] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 607.956081] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 608.345359] env[62813]: DEBUG nova.network.neutron [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Updated VIF entry in instance network info cache for port d86cff95-b8b3-41b8-96b0-ec7de5dd008e. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 608.345951] env[62813]: DEBUG nova.network.neutron [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Updating instance_info_cache with network_info: [{"id": "ef72a6d3-945e-47c8-b221-b21b479497d7", "address": "fa:16:3e:44:58:56", "network": {"id": "a5cc7ee6-670a-4757-8a08-b4e900ba29d0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1027444802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef72a6d3-94", "ovs_interfaceid": "ef72a6d3-945e-47c8-b221-b21b479497d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "49eb0829-6b71-461f-b57e-c7c15ecbd523", "address": "fa:16:3e:4f:0a:fe", "network": {"id": "d31ea7bf-62c7-4893-bdca-0e30cc7eeeb4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-203156267", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a766368-f5a3-472c-af56-9cfca63012ae", "external-id": "nsx-vlan-transportzone-518", "segmentation_id": 518, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap49eb0829-6b", "ovs_interfaceid": "49eb0829-6b71-461f-b57e-c7c15ecbd523", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d86cff95-b8b3-41b8-96b0-ec7de5dd008e", "address": "fa:16:3e:2d:7c:ac", "network": {"id": "a5cc7ee6-670a-4757-8a08-b4e900ba29d0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1027444802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd86cff95-b8", "ovs_interfaceid": "d86cff95-b8b3-41b8-96b0-ec7de5dd008e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.363563] env[62813]: DEBUG oslo_concurrency.lockutils [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] Releasing lock "refresh_cache-65d620ad-0863-4947-945e-0e4b3c01d3a3" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.364167] env[62813]: DEBUG nova.compute.manager [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Received event network-vif-plugged-d8206f05-0196-48f9-9338-d0c5e01f3e69 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 608.364167] env[62813]: DEBUG oslo_concurrency.lockutils [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] Acquiring lock "09aa702f-a28c-429b-83d9-378be8606a29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.364167] env[62813]: DEBUG oslo_concurrency.lockutils [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] Lock "09aa702f-a28c-429b-83d9-378be8606a29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.364321] env[62813]: DEBUG oslo_concurrency.lockutils [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] Lock "09aa702f-a28c-429b-83d9-378be8606a29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.365080] env[62813]: DEBUG nova.compute.manager [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 
req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] No waiting events found dispatching network-vif-plugged-d8206f05-0196-48f9-9338-d0c5e01f3e69 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 608.365080] env[62813]: WARNING nova.compute.manager [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Received unexpected event network-vif-plugged-d8206f05-0196-48f9-9338-d0c5e01f3e69 for instance with vm_state building and task_state spawning. [ 608.365080] env[62813]: DEBUG nova.compute.manager [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Received event network-changed-d8206f05-0196-48f9-9338-d0c5e01f3e69 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 608.365080] env[62813]: DEBUG nova.compute.manager [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Refreshing instance network info cache due to event network-changed-d8206f05-0196-48f9-9338-d0c5e01f3e69. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 608.365786] env[62813]: DEBUG oslo_concurrency.lockutils [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] Acquiring lock "refresh_cache-09aa702f-a28c-429b-83d9-378be8606a29" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.366087] env[62813]: DEBUG oslo_concurrency.lockutils [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] Acquired lock "refresh_cache-09aa702f-a28c-429b-83d9-378be8606a29" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.366296] env[62813]: DEBUG nova.network.neutron [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Refreshing network info cache for port d8206f05-0196-48f9-9338-d0c5e01f3e69 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 608.611250] env[62813]: DEBUG nova.network.neutron [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Updated VIF entry in instance network info cache for port f7c2a635-3662-45d3-8be3-2b45bdbe6716. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 608.611633] env[62813]: DEBUG nova.network.neutron [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Updating instance_info_cache with network_info: [{"id": "f7c2a635-3662-45d3-8be3-2b45bdbe6716", "address": "fa:16:3e:d7:d9:9d", "network": {"id": "118ce385-2ab0-4678-988b-827442af7295", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9ff9378f181e456fa241a7d30ef08cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7c2a635-36", "ovs_interfaceid": "f7c2a635-3662-45d3-8be3-2b45bdbe6716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.622727] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Releasing lock "refresh_cache-012bbc43-f61f-4aef-bd66-32fbe66f8374" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.623467] env[62813]: DEBUG nova.compute.manager [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Received event network-changed-49eb0829-6b71-461f-b57e-c7c15ecbd523 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 608.624018] env[62813]: DEBUG nova.compute.manager [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Refreshing instance network info cache due to event network-changed-49eb0829-6b71-461f-b57e-c7c15ecbd523. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 608.624296] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Acquiring lock "refresh_cache-65d620ad-0863-4947-945e-0e4b3c01d3a3" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.624497] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Acquired lock "refresh_cache-65d620ad-0863-4947-945e-0e4b3c01d3a3" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.624699] env[62813]: DEBUG nova.network.neutron [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Refreshing network info cache for port 49eb0829-6b71-461f-b57e-c7c15ecbd523 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 608.775435] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Updating instance_info_cache with network_info: [{"id": "3868ae93-45be-4658-ae16-01769db46a9d", "address": "fa:16:3e:8a:c4:d8", "network": {"id": "fe4d8d65-f572-4301-8dd9-68a231f2fa3e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-257476310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7183133e08fb4df887a305c97c13fd77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3868ae93-45", "ovs_interfaceid": "3868ae93-45be-4658-ae16-01769db46a9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.798266] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Releasing lock "refresh_cache-aa76585b-55a8-437c-8dea-7731d85a3b82" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.798266] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Instance network_info: |[{"id": "3868ae93-45be-4658-ae16-01769db46a9d", "address": "fa:16:3e:8a:c4:d8", "network": {"id": "fe4d8d65-f572-4301-8dd9-68a231f2fa3e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-257476310-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7183133e08fb4df887a305c97c13fd77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3868ae93-45", "ovs_interfaceid": "3868ae93-45be-4658-ae16-01769db46a9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 608.798266] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:c4:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35e463c7-7d78-4d66-8efd-6127b1f3ee17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3868ae93-45be-4658-ae16-01769db46a9d', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 608.807540] env[62813]: DEBUG oslo.service.loopingcall [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 608.809761] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 608.809761] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d77ffa6b-fbc1-4302-be50-cc34bfda6d0f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.836766] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 608.836766] env[62813]: value = "task-4267586" [ 608.836766] env[62813]: _type = "Task" [ 608.836766] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.846383] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267586, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.350059] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267586, 'name': CreateVM_Task, 'duration_secs': 0.378882} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.350459] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 609.351202] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.351379] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.351700] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 609.354239] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb6225fc-b428-491f-9516-68a61d9c4d07 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.359203] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Waiting for the task: (returnval){ [ 609.359203] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5242d44a-6aa5-c54b-08f5-d2a3eca9e9de" [ 609.359203] env[62813]: _type = "Task" [ 609.359203] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.373771] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5242d44a-6aa5-c54b-08f5-d2a3eca9e9de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.397909] env[62813]: DEBUG nova.network.neutron [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Updated VIF entry in instance network info cache for port d8206f05-0196-48f9-9338-d0c5e01f3e69. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 609.398901] env[62813]: DEBUG nova.network.neutron [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Updating instance_info_cache with network_info: [{"id": "d8206f05-0196-48f9-9338-d0c5e01f3e69", "address": "fa:16:3e:6f:73:48", "network": {"id": "39e6ef5c-b951-48c3-ac9c-3efbb9b7349c", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1616664542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6cf85b5a0bb04dd0a3d7e75b40622ec0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8206f05-01", "ovs_interfaceid": "d8206f05-0196-48f9-9338-d0c5e01f3e69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.418770] env[62813]: DEBUG oslo_concurrency.lockutils [req-e153d9b7-5bdb-4f2c-8fb9-4c513376b3f3 req-97c2f51d-aaa6-470a-b327-461c7e3a066d service nova] Releasing lock "refresh_cache-09aa702f-a28c-429b-83d9-378be8606a29" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.769555] env[62813]: DEBUG nova.network.neutron [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Updated VIF entry in instance network info cache for port 49eb0829-6b71-461f-b57e-c7c15ecbd523. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 609.770060] env[62813]: DEBUG nova.network.neutron [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Updating instance_info_cache with network_info: [{"id": "ef72a6d3-945e-47c8-b221-b21b479497d7", "address": "fa:16:3e:44:58:56", "network": {"id": "a5cc7ee6-670a-4757-8a08-b4e900ba29d0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1027444802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef72a6d3-94", "ovs_interfaceid": "ef72a6d3-945e-47c8-b221-b21b479497d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "49eb0829-6b71-461f-b57e-c7c15ecbd523", "address": "fa:16:3e:4f:0a:fe", "network": {"id": "d31ea7bf-62c7-4893-bdca-0e30cc7eeeb4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-203156267", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a766368-f5a3-472c-af56-9cfca63012ae", "external-id": "nsx-vlan-transportzone-518", "segmentation_id": 518, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49eb0829-6b", "ovs_interfaceid": "49eb0829-6b71-461f-b57e-c7c15ecbd523", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d86cff95-b8b3-41b8-96b0-ec7de5dd008e", "address": "fa:16:3e:2d:7c:ac", "network": {"id": "a5cc7ee6-670a-4757-8a08-b4e900ba29d0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1027444802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", 
"segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd86cff95-b8", "ovs_interfaceid": "d86cff95-b8b3-41b8-96b0-ec7de5dd008e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.786848] env[62813]: DEBUG oslo_concurrency.lockutils [req-3eb03f07-8977-4f1c-b04f-58369d24ae98 req-a7ea57c3-be33-463e-812b-7ae8d67ac1ab service nova] Releasing lock "refresh_cache-65d620ad-0863-4947-945e-0e4b3c01d3a3" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.877675] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.878063] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 609.878624] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.745735] env[62813]: DEBUG nova.compute.manager [req-fd83b098-fae3-4e82-ab97-2eb5ccf8814a req-f9e2b246-63ba-4323-aa68-a53a55d88995 service nova] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Received event network-changed-ccd65f56-38a6-4d10-8583-cf86b146c2df {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 610.746085] env[62813]: DEBUG nova.compute.manager [req-fd83b098-fae3-4e82-ab97-2eb5ccf8814a req-f9e2b246-63ba-4323-aa68-a53a55d88995 service nova] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Refreshing instance network info cache due to event network-changed-ccd65f56-38a6-4d10-8583-cf86b146c2df. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 610.746251] env[62813]: DEBUG oslo_concurrency.lockutils [req-fd83b098-fae3-4e82-ab97-2eb5ccf8814a req-f9e2b246-63ba-4323-aa68-a53a55d88995 service nova] Acquiring lock "refresh_cache-9a448d2b-0dee-4a90-b131-e6ada542f342" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.746358] env[62813]: DEBUG oslo_concurrency.lockutils [req-fd83b098-fae3-4e82-ab97-2eb5ccf8814a req-f9e2b246-63ba-4323-aa68-a53a55d88995 service nova] Acquired lock "refresh_cache-9a448d2b-0dee-4a90-b131-e6ada542f342" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.746519] env[62813]: DEBUG nova.network.neutron [req-fd83b098-fae3-4e82-ab97-2eb5ccf8814a req-f9e2b246-63ba-4323-aa68-a53a55d88995 service nova] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Refreshing network info cache for port ccd65f56-38a6-4d10-8583-cf86b146c2df {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 610.976911] env[62813]: DEBUG nova.compute.manager [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Received event network-vif-plugged-3868ae93-45be-4658-ae16-01769db46a9d {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 610.977292] env[62813]: DEBUG oslo_concurrency.lockutils [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] Acquiring lock "aa76585b-55a8-437c-8dea-7731d85a3b82-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.977417] env[62813]: DEBUG oslo_concurrency.lockutils [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] Lock "aa76585b-55a8-437c-8dea-7731d85a3b82-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.977539] env[62813]: DEBUG oslo_concurrency.lockutils [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] Lock "aa76585b-55a8-437c-8dea-7731d85a3b82-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.977711] env[62813]: DEBUG nova.compute.manager [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] No waiting events found dispatching network-vif-plugged-3868ae93-45be-4658-ae16-01769db46a9d {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 610.979039] env[62813]: WARNING nova.compute.manager [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Received unexpected event network-vif-plugged-3868ae93-45be-4658-ae16-01769db46a9d for instance with vm_state building and task_state spawning. 
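The CreateVM_Task and SearchDatastore_Task entries earlier in this span follow the usual oslo.vmware flow: a vSphere task is invoked, then polled until it reports success or error (the "progress is 0%" lines, followed by "completed successfully" with a duration_secs). The loop below is a generic, simplified stand-in for that polling under the assumption of a hypothetical poll_task() callable returning (state, progress, error); it is not oslo.vmware's wait_for_task implementation.

import logging
import time

LOG = logging.getLogger(__name__)

def wait_for_task(poll_task, interval=0.5, timeout=300):
    """Poll a task until it succeeds, fails, or times out (simplified sketch)."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = poll_task()
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed: %s" % error)
        # Still queued or running: report progress and try again -- the
        # repeated "progress is N%" entries in the log come from this kind
        # of loop.
        LOG.debug("task progress is %s%%", progress)
        time.sleep(interval)
    raise TimeoutError("task did not complete within %s seconds" % timeout)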
[ 610.979039] env[62813]: DEBUG nova.compute.manager [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Received event network-changed-3868ae93-45be-4658-ae16-01769db46a9d {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 610.979171] env[62813]: DEBUG nova.compute.manager [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Refreshing instance network info cache due to event network-changed-3868ae93-45be-4658-ae16-01769db46a9d. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 610.979573] env[62813]: DEBUG oslo_concurrency.lockutils [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] Acquiring lock "refresh_cache-aa76585b-55a8-437c-8dea-7731d85a3b82" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.979573] env[62813]: DEBUG oslo_concurrency.lockutils [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] Acquired lock "refresh_cache-aa76585b-55a8-437c-8dea-7731d85a3b82" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.979573] env[62813]: DEBUG nova.network.neutron [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Refreshing network info cache for port 3868ae93-45be-4658-ae16-01769db46a9d {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 611.681498] env[62813]: DEBUG nova.network.neutron [req-fd83b098-fae3-4e82-ab97-2eb5ccf8814a req-f9e2b246-63ba-4323-aa68-a53a55d88995 service nova] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Updated VIF entry in instance network info cache for port ccd65f56-38a6-4d10-8583-cf86b146c2df. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 611.681498] env[62813]: DEBUG nova.network.neutron [req-fd83b098-fae3-4e82-ab97-2eb5ccf8814a req-f9e2b246-63ba-4323-aa68-a53a55d88995 service nova] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Updating instance_info_cache with network_info: [{"id": "ccd65f56-38a6-4d10-8583-cf86b146c2df", "address": "fa:16:3e:c9:7e:1a", "network": {"id": "fe4d8d65-f572-4301-8dd9-68a231f2fa3e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-257476310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7183133e08fb4df887a305c97c13fd77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccd65f56-38", "ovs_interfaceid": "ccd65f56-38a6-4d10-8583-cf86b146c2df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.694432] env[62813]: DEBUG oslo_concurrency.lockutils [req-fd83b098-fae3-4e82-ab97-2eb5ccf8814a req-f9e2b246-63ba-4323-aa68-a53a55d88995 service nova] Releasing lock "refresh_cache-9a448d2b-0dee-4a90-b131-e6ada542f342" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.961245] env[62813]: DEBUG nova.network.neutron [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Updated VIF entry in instance network info cache for port 3868ae93-45be-4658-ae16-01769db46a9d. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 611.961245] env[62813]: DEBUG nova.network.neutron [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Updating instance_info_cache with network_info: [{"id": "3868ae93-45be-4658-ae16-01769db46a9d", "address": "fa:16:3e:8a:c4:d8", "network": {"id": "fe4d8d65-f572-4301-8dd9-68a231f2fa3e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-257476310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7183133e08fb4df887a305c97c13fd77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3868ae93-45", "ovs_interfaceid": "3868ae93-45be-4658-ae16-01769db46a9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.978109] env[62813]: DEBUG oslo_concurrency.lockutils [req-7e0c9690-9b3f-486f-8812-70682d994cc6 req-7d103038-c87e-412c-bf5b-f2fa27e10c64 service nova] Releasing lock "refresh_cache-aa76585b-55a8-437c-8dea-7731d85a3b82" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.335119] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Acquiring lock "356088a2-b55e-4ff1-9422-a53ab6830fc9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.335119] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Lock "356088a2-b55e-4ff1-9422-a53ab6830fc9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.743630] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Acquiring lock "489b821e-f7d0-446f-8197-550c808e5a99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.743630] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Lock 
"489b821e-f7d0-446f-8197-550c808e5a99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.966774] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b71de503-8c74-4fab-8038-bdcb0be7868c tempest-ImagesNegativeTestJSON-1418842950 tempest-ImagesNegativeTestJSON-1418842950-project-member] Acquiring lock "51817412-7e0a-48fe-8f8e-766eea45e60c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.967191] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b71de503-8c74-4fab-8038-bdcb0be7868c tempest-ImagesNegativeTestJSON-1418842950 tempest-ImagesNegativeTestJSON-1418842950-project-member] Lock "51817412-7e0a-48fe-8f8e-766eea45e60c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.699971] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1f435e39-67ab-414c-8190-340cf106387e tempest-ServersWithSpecificFlavorTestJSON-1247154468 tempest-ServersWithSpecificFlavorTestJSON-1247154468-project-member] Acquiring lock "52329a92-b534-4811-b117-2041b125f4c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.700279] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1f435e39-67ab-414c-8190-340cf106387e tempest-ServersWithSpecificFlavorTestJSON-1247154468 tempest-ServersWithSpecificFlavorTestJSON-1247154468-project-member] Lock "52329a92-b534-4811-b117-2041b125f4c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.516706] env[62813]: DEBUG oslo_concurrency.lockutils [None req-a7c08035-f827-4cc4-8e06-3d7920649966 tempest-InstanceActionsTestJSON-1307636854 tempest-InstanceActionsTestJSON-1307636854-project-member] Acquiring lock "b9e1cb26-df61-44aa-952f-e50e24766a6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.516990] env[62813]: DEBUG oslo_concurrency.lockutils [None req-a7c08035-f827-4cc4-8e06-3d7920649966 tempest-InstanceActionsTestJSON-1307636854 tempest-InstanceActionsTestJSON-1307636854-project-member] Lock "b9e1cb26-df61-44aa-952f-e50e24766a6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.736901] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8fcb9a0e-ecaa-4177-a851-6cb6238a3e62 tempest-InstanceActionsNegativeTestJSON-1061991352 tempest-InstanceActionsNegativeTestJSON-1061991352-project-member] Acquiring lock "338a113c-1d04-4243-8500-fcc6d458d3ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.736901] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8fcb9a0e-ecaa-4177-a851-6cb6238a3e62 tempest-InstanceActionsNegativeTestJSON-1061991352 tempest-InstanceActionsNegativeTestJSON-1061991352-project-member] Lock "338a113c-1d04-4243-8500-fcc6d458d3ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.172224] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4495db5c-fe45-4331-b47d-784a83346216 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquiring lock "70a8650c-112c-44c2-b3cb-1b8cab557cac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.172522] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4495db5c-fe45-4331-b47d-784a83346216 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "70a8650c-112c-44c2-b3cb-1b8cab557cac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.448370] env[62813]: WARNING oslo_vmware.rw_handles [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 634.448370] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 634.448370] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 634.448370] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 634.448370] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 634.448370] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 634.448370] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 634.448370] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 634.448370] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 634.448370] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 634.448370] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 634.448370] env[62813]: ERROR oslo_vmware.rw_handles [ 634.452330] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/1e66b690-c849-4b18-93e7-8ebc6139e033/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 634.452330] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 634.452330] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Copying Virtual Disk [datastore2] vmware_temp/1e66b690-c849-4b18-93e7-8ebc6139e033/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/1e66b690-c849-4b18-93e7-8ebc6139e033/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 634.452330] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e5fefb4-0c99-452f-8bbc-4444703f6b0e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.464591] env[62813]: DEBUG oslo_vmware.api [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Waiting for the task: (returnval){ [ 634.464591] env[62813]: value = "task-4267587" [ 634.464591] env[62813]: _type = "Task" [ 634.464591] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.479064] env[62813]: DEBUG oslo_vmware.api [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Task: {'id': task-4267587, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.882031] env[62813]: DEBUG oslo_concurrency.lockutils [None req-15d89fd7-63ab-497d-98ba-10eff0a7ed77 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "3684a5c7-4998-4d5b-8cb6-7df3a9bf9270" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.882031] env[62813]: DEBUG oslo_concurrency.lockutils [None req-15d89fd7-63ab-497d-98ba-10eff0a7ed77 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "3684a5c7-4998-4d5b-8cb6-7df3a9bf9270" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.980514] env[62813]: DEBUG oslo_vmware.exceptions [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 634.981216] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.985980] env[62813]: ERROR nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 634.985980] env[62813]: Faults: ['InvalidArgument'] [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Traceback (most recent call last): [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] yield resources [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] self.driver.spawn(context, instance, image_meta, [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] self._fetch_image_if_missing(context, vi) [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] image_cache(vi, tmp_image_ds_loc) [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] vm_util.copy_virtual_disk( [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] session._wait_for_task(vmdk_copy_task) [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] return self.wait_for_task(task_ref) [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] return evt.wait() [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] result = hub.switch() [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] return self.greenlet.switch() [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] self.f(*self.args, **self.kw) [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] raise exceptions.translate_fault(task_info.error) [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Faults: ['InvalidArgument'] [ 634.985980] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] [ 634.987888] env[62813]: INFO nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Terminating instance [ 634.989194] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.989415] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 634.990051] env[62813]: DEBUG nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 
tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 634.990245] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 634.990480] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c532f62-ae04-4877-9748-c06979bb72eb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.993128] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5bb69e-ead6-4d63-bea5-35791046b6e2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.001443] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 635.001732] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a30e9cac-1126-411d-980d-1b6770dac37d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.005525] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 635.005525] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 635.005887] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5630e4e1-922d-4836-8b76-981bf0b0004c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.012021] env[62813]: DEBUG oslo_vmware.api [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Waiting for the task: (returnval){ [ 635.012021] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]529e18b9-1ced-ee1c-645e-8cba28213ca4" [ 635.012021] env[62813]: _type = "Task" [ 635.012021] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.020837] env[62813]: DEBUG oslo_vmware.api [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]529e18b9-1ced-ee1c-645e-8cba28213ca4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.078401] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 635.078695] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 635.078891] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Deleting the datastore file [datastore2] 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 635.079194] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f7b8f19-7083-4d89-9751-ee615136da85 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.086007] env[62813]: DEBUG oslo_vmware.api [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Waiting for the task: (returnval){ [ 635.086007] env[62813]: value = "task-4267589" [ 635.086007] env[62813]: _type = "Task" [ 635.086007] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.094923] env[62813]: DEBUG oslo_vmware.api [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Task: {'id': task-4267589, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.531577] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 635.531844] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Creating directory with path [datastore2] vmware_temp/64a9b171-c881-46f4-8517-4737e016e425/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 635.534024] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e4642ed-ea82-40a7-9575-d3daa0b193ea {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.548467] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Created directory with path [datastore2] vmware_temp/64a9b171-c881-46f4-8517-4737e016e425/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 635.550085] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Fetch image to [datastore2] vmware_temp/64a9b171-c881-46f4-8517-4737e016e425/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 635.553183] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/64a9b171-c881-46f4-8517-4737e016e425/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 635.553183] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32be1bb8-6e0f-4ee9-9f0b-7f721bd7efd4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.568753] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90cf6325-03f9-43f3-8643-ebcc9652cb4d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.581391] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8327fa0-04ae-40ee-a992-5991ef7d0c7d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.623999] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97044ab9-a671-41ec-b685-09e09122ae4f {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.630647] env[62813]: DEBUG oslo_vmware.api [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Task: {'id': task-4267589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0768} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.630647] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 635.630647] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 635.630647] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 635.630647] env[62813]: INFO nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Took 0.64 seconds to destroy the instance on the hypervisor. 
[ 635.634163] env[62813]: DEBUG nova.compute.claims [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 635.634163] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.634163] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.636952] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-06fb3968-81f9-44e6-9267-a11411fbd312 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.739114] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 635.968341] env[62813]: DEBUG oslo_vmware.rw_handles [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/64a9b171-c881-46f4-8517-4737e016e425/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 636.032473] env[62813]: DEBUG oslo_vmware.rw_handles [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 636.032473] env[62813]: DEBUG oslo_vmware.rw_handles [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/64a9b171-c881-46f4-8517-4737e016e425/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 636.136131] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78a3f82-5111-4c18-a93e-354c4ace8332 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.145069] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8db107f-3af3-4adb-9624-046d36c93f64 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.182039] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75b7c43-53c3-406a-8640-bf98fe346b66 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.193804] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa0aba6-7217-45b7-b4b8-ca1fa84b7da3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.217833] env[62813]: DEBUG nova.compute.provider_tree [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.232320] env[62813]: DEBUG nova.scheduler.client.report [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 636.258038] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.623s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.258664] env[62813]: ERROR nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 636.258664] env[62813]: Faults: ['InvalidArgument'] [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Traceback (most recent call last): [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 636.258664] 
env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] self.driver.spawn(context, instance, image_meta, [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] self._fetch_image_if_missing(context, vi) [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] image_cache(vi, tmp_image_ds_loc) [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] vm_util.copy_virtual_disk( [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] session._wait_for_task(vmdk_copy_task) [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] return self.wait_for_task(task_ref) [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] return evt.wait() [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] result = hub.switch() [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] return self.greenlet.switch() [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] self.f(*self.args, **self.kw) [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] raise exceptions.translate_fault(task_info.error) [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Faults: ['InvalidArgument'] [ 636.258664] env[62813]: ERROR nova.compute.manager [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] [ 636.259562] env[62813]: DEBUG nova.compute.utils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 636.264387] env[62813]: DEBUG nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Build of instance 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f was re-scheduled: A specified parameter was not correct: fileType [ 636.264387] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 636.264862] env[62813]: DEBUG nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 636.265573] env[62813]: DEBUG nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 636.265573] env[62813]: DEBUG nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 636.265786] env[62813]: DEBUG nova.network.neutron [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 636.880271] env[62813]: DEBUG nova.network.neutron [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.903614] env[62813]: INFO nova.compute.manager [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f] Took 0.64 seconds to deallocate network for instance. [ 637.081417] env[62813]: INFO nova.scheduler.client.report [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Deleted allocations for instance 6ff9b201-b6a2-45a9-bb6b-0acb8c82405f [ 637.124989] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd49b2d1-bbfb-4a9a-87a3-5a175a7b3189 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Lock "6ff9b201-b6a2-45a9-bb6b-0acb8c82405f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.298s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.167315] env[62813]: DEBUG nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 637.190181] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Acquiring lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.190427] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.325265] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.325652] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.327705] env[62813]: INFO nova.compute.claims [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.455302] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0cae137f-23f0-434d-84f4-4382b69a7b06 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "704614e0-aaa3-48b6-8208-47af7ca0f367" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.455834] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0cae137f-23f0-434d-84f4-4382b69a7b06 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "704614e0-aaa3-48b6-8208-47af7ca0f367" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.791569] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27fc29d-4012-405c-b057-a035eeb4ad60 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.803055] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7875f5ca-abef-4c1d-a25c-e00cdcb98295 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.838240] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543853c0-0080-4c6f-a926-75e5eac0d605 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.846254] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcfea9dc-6dbd-44ab-b845-cb86125ee3da {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.861070] env[62813]: DEBUG nova.compute.provider_tree [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.872108] env[62813]: DEBUG nova.scheduler.client.report [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 637.896882] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.571s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.897476] env[62813]: DEBUG nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 637.947271] env[62813]: DEBUG nova.compute.utils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 637.954022] env[62813]: DEBUG nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Allocating IP information in the background. 
{{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 637.954022] env[62813]: DEBUG nova.network.neutron [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 637.968024] env[62813]: DEBUG nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 638.046231] env[62813]: DEBUG nova.policy [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1917514413c54061a5b8c2cfafb2d4e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'acce9fd1112b4ca9bd5429abd122319b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 638.064873] env[62813]: DEBUG nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 638.105729] env[62813]: DEBUG nova.virt.hardware [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 638.105974] env[62813]: DEBUG nova.virt.hardware [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 638.106164] env[62813]: DEBUG nova.virt.hardware [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 638.106388] env[62813]: DEBUG nova.virt.hardware [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 638.106550] env[62813]: DEBUG nova.virt.hardware [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 638.106730] env[62813]: DEBUG nova.virt.hardware [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 638.106918] env[62813]: DEBUG nova.virt.hardware [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 638.107096] env[62813]: DEBUG nova.virt.hardware [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 638.107291] env[62813]: DEBUG nova.virt.hardware [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 638.107477] env[62813]: DEBUG nova.virt.hardware [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 638.107655] env[62813]: DEBUG nova.virt.hardware [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 638.108568] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b75820-6b78-44bd-b074-4d1e15cd85bb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.118351] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a52ffc6-51ed-43dc-955e-a49a0f4065ac {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.543139] env[62813]: DEBUG nova.network.neutron [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Successfully created port: 554cddee-4c08-4af6-9e43-fd21d770948e {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 639.233675] env[62813]: DEBUG oslo_concurrency.lockutils [None req-66590f55-6eda-4198-b9e4-49e3ca294936 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Acquiring lock "b845396e-1641-4668-b687-348f1ee8b6f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.234471] env[62813]: DEBUG oslo_concurrency.lockutils [None req-66590f55-6eda-4198-b9e4-49e3ca294936 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Lock "b845396e-1641-4668-b687-348f1ee8b6f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.663017] env[62813]: DEBUG nova.network.neutron [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Successfully updated port: 554cddee-4c08-4af6-9e43-fd21d770948e {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 639.679685] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a 
tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Acquiring lock "refresh_cache-4357ed8f-14f7-4d44-8d1b-262ccaafe1e0" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.682435] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Acquired lock "refresh_cache-4357ed8f-14f7-4d44-8d1b-262ccaafe1e0" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.682802] env[62813]: DEBUG nova.network.neutron [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 639.901259] env[62813]: DEBUG nova.network.neutron [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 639.910685] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b3318a6b-df2a-4728-81d8-99b938e8c641 tempest-ServerGroupTestJSON-752837002 tempest-ServerGroupTestJSON-752837002-project-member] Acquiring lock "4e783b92-0668-4e70-9848-4b4320318603" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.911075] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b3318a6b-df2a-4728-81d8-99b938e8c641 tempest-ServerGroupTestJSON-752837002 tempest-ServerGroupTestJSON-752837002-project-member] Lock "4e783b92-0668-4e70-9848-4b4320318603" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.179069] env[62813]: DEBUG nova.network.neutron [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Updating instance_info_cache with network_info: [{"id": "554cddee-4c08-4af6-9e43-fd21d770948e", "address": "fa:16:3e:21:0f:e4", "network": {"id": "516701be-a018-46a6-a744-3141eaf91117", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1013191861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "acce9fd1112b4ca9bd5429abd122319b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24727047-6358-4015-86c1-394ab07fb88f", "external-id": 
"nsx-vlan-transportzone-476", "segmentation_id": 476, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap554cddee-4c", "ovs_interfaceid": "554cddee-4c08-4af6-9e43-fd21d770948e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.218525] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Releasing lock "refresh_cache-4357ed8f-14f7-4d44-8d1b-262ccaafe1e0" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.218525] env[62813]: DEBUG nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Instance network_info: |[{"id": "554cddee-4c08-4af6-9e43-fd21d770948e", "address": "fa:16:3e:21:0f:e4", "network": {"id": "516701be-a018-46a6-a744-3141eaf91117", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1013191861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "acce9fd1112b4ca9bd5429abd122319b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24727047-6358-4015-86c1-394ab07fb88f", "external-id": "nsx-vlan-transportzone-476", "segmentation_id": 476, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap554cddee-4c", "ovs_interfaceid": "554cddee-4c08-4af6-9e43-fd21d770948e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 640.218525] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:0f:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24727047-6358-4015-86c1-394ab07fb88f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '554cddee-4c08-4af6-9e43-fd21d770948e', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.228461] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Creating folder: Project (acce9fd1112b4ca9bd5429abd122319b). Parent ref: group-v840812. 
{{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 640.229263] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e6170e8-2fad-431c-9382-ff1d1e5c980b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.244097] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Created folder: Project (acce9fd1112b4ca9bd5429abd122319b) in parent group-v840812. [ 640.244097] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Creating folder: Instances. Parent ref: group-v840841. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 640.244447] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ab8a46c-3338-4d62-8234-158b6e940c8e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.258411] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Created folder: Instances in parent group-v840841. [ 640.258928] env[62813]: DEBUG oslo.service.loopingcall [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 640.259380] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 640.259747] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-063eeb51-c082-4286-b433-4152ba3cc170 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.284308] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.284308] env[62813]: value = "task-4267592" [ 640.284308] env[62813]: _type = "Task" [ 640.284308] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.302936] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267592, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.798436] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267592, 'name': CreateVM_Task} progress is 99%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.297848] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267592, 'name': CreateVM_Task} progress is 99%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.800233] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267592, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.931080] env[62813]: DEBUG nova.compute.manager [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Received event network-vif-plugged-554cddee-4c08-4af6-9e43-fd21d770948e {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 641.931080] env[62813]: DEBUG oslo_concurrency.lockutils [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] Acquiring lock "4357ed8f-14f7-4d44-8d1b-262ccaafe1e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.935022] env[62813]: DEBUG oslo_concurrency.lockutils [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] Lock "4357ed8f-14f7-4d44-8d1b-262ccaafe1e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.935022] env[62813]: DEBUG oslo_concurrency.lockutils [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] Lock "4357ed8f-14f7-4d44-8d1b-262ccaafe1e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.935022] env[62813]: DEBUG nova.compute.manager [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] No waiting events found dispatching network-vif-plugged-554cddee-4c08-4af6-9e43-fd21d770948e {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 641.935509] env[62813]: WARNING nova.compute.manager [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Received unexpected event network-vif-plugged-554cddee-4c08-4af6-9e43-fd21d770948e for instance with vm_state building and task_state spawning. [ 641.935712] env[62813]: DEBUG nova.compute.manager [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Received event network-changed-554cddee-4c08-4af6-9e43-fd21d770948e {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 641.935756] env[62813]: DEBUG nova.compute.manager [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Refreshing instance network info cache due to event network-changed-554cddee-4c08-4af6-9e43-fd21d770948e. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 641.936335] env[62813]: DEBUG oslo_concurrency.lockutils [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] Acquiring lock "refresh_cache-4357ed8f-14f7-4d44-8d1b-262ccaafe1e0" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.936335] env[62813]: DEBUG oslo_concurrency.lockutils [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] Acquired lock "refresh_cache-4357ed8f-14f7-4d44-8d1b-262ccaafe1e0" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.936335] env[62813]: DEBUG nova.network.neutron [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Refreshing network info cache for port 554cddee-4c08-4af6-9e43-fd21d770948e {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 642.304367] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267592, 'name': CreateVM_Task, 'duration_secs': 1.578198} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.304367] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 642.304797] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.304797] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.305584] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 642.305584] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4745986-8171-4b7a-9825-3eb9c580ad62 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.311400] env[62813]: DEBUG oslo_vmware.api [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Waiting for the task: (returnval){ [ 642.311400] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52e416c0-f719-75f7-03f3-5a94ca4045d6" [ 642.311400] env[62813]: _type = "Task" [ 642.311400] env[62813]: } to 
complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.325466] env[62813]: DEBUG oslo_vmware.api [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52e416c0-f719-75f7-03f3-5a94ca4045d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.824634] env[62813]: DEBUG nova.network.neutron [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Updated VIF entry in instance network info cache for port 554cddee-4c08-4af6-9e43-fd21d770948e. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 642.824968] env[62813]: DEBUG nova.network.neutron [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Updating instance_info_cache with network_info: [{"id": "554cddee-4c08-4af6-9e43-fd21d770948e", "address": "fa:16:3e:21:0f:e4", "network": {"id": "516701be-a018-46a6-a744-3141eaf91117", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1013191861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "acce9fd1112b4ca9bd5429abd122319b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24727047-6358-4015-86c1-394ab07fb88f", "external-id": "nsx-vlan-transportzone-476", "segmentation_id": 476, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap554cddee-4c", "ovs_interfaceid": "554cddee-4c08-4af6-9e43-fd21d770948e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.826998] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.827243] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 642.827570] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Acquiring lock 
"[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.839807] env[62813]: DEBUG oslo_concurrency.lockutils [req-da94cedd-b64d-4ee0-b534-b2cbdc06afab req-c9bf49d3-f4b3-4e03-8b23-33c7b9ad667e service nova] Releasing lock "refresh_cache-4357ed8f-14f7-4d44-8d1b-262ccaafe1e0" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.636904] env[62813]: DEBUG oslo_concurrency.lockutils [None req-09f04951-984c-43e3-8d6f-b5fd6510acb3 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Acquiring lock "a7f9eb88-e447-4f59-814e-6ebbb6a2ecb8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.637239] env[62813]: DEBUG oslo_concurrency.lockutils [None req-09f04951-984c-43e3-8d6f-b5fd6510acb3 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Lock "a7f9eb88-e447-4f59-814e-6ebbb6a2ecb8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.424590] env[62813]: DEBUG oslo_concurrency.lockutils [None req-394d360c-2cce-4de9-a54b-a742cb2d6faf tempest-ServersAdminNegativeTestJSON-1375939108 tempest-ServersAdminNegativeTestJSON-1375939108-project-member] Acquiring lock "51a8bec7-c8fd-4efb-ad91-eb34bb40a8fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.424590] env[62813]: DEBUG oslo_concurrency.lockutils [None req-394d360c-2cce-4de9-a54b-a742cb2d6faf tempest-ServersAdminNegativeTestJSON-1375939108 tempest-ServersAdminNegativeTestJSON-1375939108-project-member] Lock "51a8bec7-c8fd-4efb-ad91-eb34bb40a8fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.172778] env[62813]: DEBUG oslo_concurrency.lockutils [None req-be42db35-1275-4746-aa01-dc7440d88d06 tempest-ServersV294TestFqdnHostnames-788616215 tempest-ServersV294TestFqdnHostnames-788616215-project-member] Acquiring lock "097bae8e-614d-4322-b767-d56e0dc1b658" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.172778] env[62813]: DEBUG oslo_concurrency.lockutils [None req-be42db35-1275-4746-aa01-dc7440d88d06 tempest-ServersV294TestFqdnHostnames-788616215 tempest-ServersV294TestFqdnHostnames-788616215-project-member] Lock "097bae8e-614d-4322-b767-d56e0dc1b658" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.002214] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2c38552a-fe03-4fdc-9a39-8ca3ee2ca58f 
tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] Acquiring lock "22549912-2253-42bb-b2d7-8d0512c2a9d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.002214] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2c38552a-fe03-4fdc-9a39-8ca3ee2ca58f tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] Lock "22549912-2253-42bb-b2d7-8d0512c2a9d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.496553] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e2fe2488-28fd-41f7-968a-a894c48c1428 tempest-ServerDiagnosticsV248Test-429699338 tempest-ServerDiagnosticsV248Test-429699338-project-member] Acquiring lock "7cdd84be-1e0a-4e4a-9e40-b4d589f08914" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.496863] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e2fe2488-28fd-41f7-968a-a894c48c1428 tempest-ServerDiagnosticsV248Test-429699338 tempest-ServerDiagnosticsV248Test-429699338-project-member] Lock "7cdd84be-1e0a-4e4a-9e40-b4d589f08914" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.624105] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.652992] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.653226] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.653388] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.653536] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 650.163721] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.163979] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 650.164214] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 650.190174] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 650.190174] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 650.190174] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 650.190174] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 650.190174] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 650.190174] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 650.190174] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 650.190174] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 650.190174] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 650.190174] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 650.190174] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 650.190174] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.202945] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.203220] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.203366] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.203555] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 650.204731] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa298ca-12ea-458e-a18d-35135bc9314e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.214951] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290ddf9a-cb26-4679-a8d4-f450486f93bb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.232802] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4993300f-e620-4cd9-8d55-d6d2da3ac364 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.241369] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a07a238c-c4a8-4d8b-ab60-500e9e84f851 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.278859] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180698MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 650.279041] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.279255] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.374144] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c56debdf-ab77-4151-bc20-6973ae594d87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 650.374342] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 022718e2-52ec-4130-81b0-fb39e57d6efe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 650.374444] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4e911f12-5d9c-4713-b1e0-4d87a589a9d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 650.374585] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 65d620ad-0863-4947-945e-0e4b3c01d3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 650.374691] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 650.374954] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 012bbc43-f61f-4aef-bd66-32fbe66f8374 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 650.374954] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 09aa702f-a28c-429b-83d9-378be8606a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 650.375053] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 9a448d2b-0dee-4a90-b131-e6ada542f342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 650.375167] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance aa76585b-55a8-437c-8dea-7731d85a3b82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 650.375284] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 650.404443] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 76b0e03d-9636-4328-bfd5-17c434cfae72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.436688] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 356088a2-b55e-4ff1-9422-a53ab6830fc9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.448454] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 489b821e-f7d0-446f-8197-550c808e5a99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.461313] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 51817412-7e0a-48fe-8f8e-766eea45e60c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.475983] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 52329a92-b534-4811-b117-2041b125f4c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.490463] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b9e1cb26-df61-44aa-952f-e50e24766a6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.507077] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 338a113c-1d04-4243-8500-fcc6d458d3ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.543102] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 70a8650c-112c-44c2-b3cb-1b8cab557cac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.555270] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 3684a5c7-4998-4d5b-8cb6-7df3a9bf9270 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.565327] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b946bdda-a8a4-4a82-b2f7-99637fcae21c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.576338] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 704614e0-aaa3-48b6-8208-47af7ca0f367 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.590608] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b845396e-1641-4668-b687-348f1ee8b6f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.605941] env[62813]: DEBUG oslo_concurrency.lockutils [None req-339dccd5-967e-4e11-925b-b1d62468c6e5 tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Acquiring lock "f863dfb3-98d5-473b-9e41-85984e350070" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.606194] env[62813]: DEBUG oslo_concurrency.lockutils [None req-339dccd5-967e-4e11-925b-b1d62468c6e5 tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Lock "f863dfb3-98d5-473b-9e41-85984e350070" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.606722] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4e783b92-0668-4e70-9848-4b4320318603 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.617561] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a7f9eb88-e447-4f59-814e-6ebbb6a2ecb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.629871] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 51a8bec7-c8fd-4efb-ad91-eb34bb40a8fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.643726] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 097bae8e-614d-4322-b767-d56e0dc1b658 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.656264] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 22549912-2253-42bb-b2d7-8d0512c2a9d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.667999] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7cdd84be-1e0a-4e4a-9e40-b4d589f08914 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.668218] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 650.669029] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 651.117473] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cd3929-416e-4765-b050-919d4e3d4945 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.126253] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dcb5fb0-0ad6-4862-b1c2-cced7615b231 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.163622] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5722f9be-f3a0-43f6-b062-e0fbac66cc3c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.172197] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2267e5b9-7d3e-4bb0-822d-3ea5e47f825b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.189639] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.199710] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 651.218965] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 651.219231] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.940s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.193593] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 652.193905] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 652.193977] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 652.194164] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.494756] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3d7f5846-71d8-42dc-8a7c-3ea7a9bb21b6 tempest-ServersAdmin275Test-2144933203 tempest-ServersAdmin275Test-2144933203-project-member] Acquiring lock "952194e3-6318-4ecc-8d48-bda3811c4d49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.495133] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3d7f5846-71d8-42dc-8a7c-3ea7a9bb21b6 tempest-ServersAdmin275Test-2144933203 tempest-ServersAdmin275Test-2144933203-project-member] Lock "952194e3-6318-4ecc-8d48-bda3811c4d49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.430888] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8ddb9483-faed-4843-851d-b9aec164990f tempest-ServerActionsV293TestJSON-132017951 tempest-ServerActionsV293TestJSON-132017951-project-member] Acquiring lock "5b935b76-027b-4b4a-a61b-3e4cc2f36c08" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.431205] env[62813]: 
DEBUG oslo_concurrency.lockutils [None req-8ddb9483-faed-4843-851d-b9aec164990f tempest-ServerActionsV293TestJSON-132017951 tempest-ServerActionsV293TestJSON-132017951-project-member] Lock "5b935b76-027b-4b4a-a61b-3e4cc2f36c08" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.419229] env[62813]: WARNING oslo_vmware.rw_handles [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 682.419229] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 682.419229] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 682.419229] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 682.419229] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 682.419229] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 682.419229] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 682.419229] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 682.419229] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 682.419229] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 682.419229] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 682.419229] env[62813]: ERROR oslo_vmware.rw_handles [ 682.419542] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/64a9b171-c881-46f4-8517-4737e016e425/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 682.421346] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 682.421656] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Copying Virtual Disk [datastore2] vmware_temp/64a9b171-c881-46f4-8517-4737e016e425/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/64a9b171-c881-46f4-8517-4737e016e425/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 682.421973] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-c8628380-a4d1-430c-939c-24245687e6aa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.430613] env[62813]: DEBUG oslo_vmware.api [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Waiting for the task: (returnval){ [ 682.430613] env[62813]: value = "task-4267604" [ 682.430613] env[62813]: _type = "Task" [ 682.430613] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.439253] env[62813]: DEBUG oslo_vmware.api [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Task: {'id': task-4267604, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.941910] env[62813]: DEBUG oslo_vmware.exceptions [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 682.942292] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.942855] env[62813]: ERROR nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 682.942855] env[62813]: Faults: ['InvalidArgument'] [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Traceback (most recent call last): [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] yield resources [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] self.driver.spawn(context, instance, image_meta, [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 
022718e2-52ec-4130-81b0-fb39e57d6efe] self._fetch_image_if_missing(context, vi) [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] image_cache(vi, tmp_image_ds_loc) [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] vm_util.copy_virtual_disk( [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] session._wait_for_task(vmdk_copy_task) [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] return self.wait_for_task(task_ref) [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] return evt.wait() [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] result = hub.switch() [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] return self.greenlet.switch() [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] self.f(*self.args, **self.kw) [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] raise exceptions.translate_fault(task_info.error) [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Faults: ['InvalidArgument'] [ 682.942855] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] [ 682.943525] env[62813]: INFO nova.compute.manager [None 
req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Terminating instance [ 682.945154] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.945154] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 682.945323] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54b026f0-b0ac-43a5-b1e1-3595974db0f1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.948462] env[62813]: DEBUG nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 682.948669] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 682.949503] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c558c3-0166-4ded-abcf-bff736c6cbc1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.953565] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 682.953740] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 682.956392] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c286c422-426f-41df-a3f2-17671a3c5a76 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.958697] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 682.958914] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5eb759d2-11fa-4116-acde-3626b4c8eb42 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.963725] env[62813]: DEBUG oslo_vmware.api [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Waiting for the task: (returnval){ [ 682.963725] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52df9b5d-ed28-2281-5004-22d1d95f14a2" [ 682.963725] env[62813]: _type = "Task" [ 682.963725] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.971822] env[62813]: DEBUG oslo_vmware.api [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52df9b5d-ed28-2281-5004-22d1d95f14a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.040053] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 683.040053] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 683.040053] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Deleting the datastore file [datastore2] 022718e2-52ec-4130-81b0-fb39e57d6efe {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 683.040323] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cccc6c21-f4c4-4fd5-a890-6c71d51d85e7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.048064] env[62813]: DEBUG oslo_vmware.api [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Waiting for the task: (returnval){ [ 683.048064] env[62813]: value = "task-4267606" [ 683.048064] env[62813]: _type = "Task" [ 683.048064] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.056228] env[62813]: DEBUG oslo_vmware.api [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Task: {'id': task-4267606, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.474595] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 683.474899] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Creating directory with path [datastore2] vmware_temp/973266c9-3b77-470d-b632-55bcc08a5390/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 683.475131] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3c312da-41ae-4980-95cd-8e04d0fb9cf3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.489024] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Created directory with path [datastore2] vmware_temp/973266c9-3b77-470d-b632-55bcc08a5390/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 683.489255] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Fetch image to [datastore2] vmware_temp/973266c9-3b77-470d-b632-55bcc08a5390/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 683.489430] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/973266c9-3b77-470d-b632-55bcc08a5390/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 683.490235] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61971311-399d-4acf-ac94-d96f8410581c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.497618] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3efa37a-7826-4513-96b3-fa5473acc16a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.507451] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c49542-c5e4-4fb7-b3f2-f703a987014c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.538667] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9c6236c1-15f4-4100-88fb-c214b2aef2a0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.545540] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3212c8a2-4106-4b9a-a768-4a6568b0c8fe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.555703] env[62813]: DEBUG oslo_vmware.api [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Task: {'id': task-4267606, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07918} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.555977] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 683.556208] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 683.556413] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 683.556597] env[62813]: INFO nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Took 0.61 seconds to destroy the instance on the hypervisor. 
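[editor's note] The records above show the pattern that repeats throughout this log: a vCenter task is started (CopyVirtualDisk_Task, DeleteDatastoreFile_Task), Nova waits on it via oslo.vmware, the task is polled ("progress is 0%"), and the result is either returned ("completed successfully", with a duration_secs) or, as with the InvalidArgument/fileType fault above, translated into a VimFaultException that aborts the spawn. The sketch below is a minimal, self-contained reconstruction of that poll-and-translate loop for illustration only; the TaskInfo shape, the wait_for_task signature, and the fault handling are simplified stand-ins, not the actual oslo_vmware.api implementation.

```python
import time
from dataclasses import dataclass
from typing import Callable, Optional


class VimFaultException(Exception):
    """Simplified stand-in for the VimFaultException raised in the traceback above."""

    def __init__(self, fault_list, msg):
        super().__init__(msg)
        self.fault_list = fault_list


@dataclass
class TaskInfo:
    """Hypothetical snapshot of a vCenter task as returned by one poll."""
    state: str                          # 'running' | 'success' | 'error'
    progress: int = 0
    error_fault: Optional[str] = None   # e.g. 'InvalidArgument'
    error_msg: str = ''


def wait_for_task(poll: Callable[[], TaskInfo], interval: float = 0.5) -> TaskInfo:
    """Poll until the task leaves 'running'; return on 'success', raise on 'error'.

    Mirrors the behaviour visible in the log: repeated "progress is N%" polls,
    then either a successful completion or a fault that is turned into an
    exception and propagates up through the spawn path.
    """
    while True:
        info = poll()
        if info.state == 'running':
            print(f"progress is {info.progress}%")   # cf. the _poll_task records above
            time.sleep(interval)
            continue
        if info.state == 'success':
            return info
        # 'error': surface the vCenter fault, as the traceback above shows
        raise VimFaultException([info.error_fault or 'UnknownFault'], info.error_msg)


# Example: a copy task that fails on its second poll, like task-4267604 above.
polls = iter([
    TaskInfo('running'),
    TaskInfo('error', error_fault='InvalidArgument',
             error_msg='A specified parameter was not correct: fileType'),
])
try:
    wait_for_task(lambda: next(polls), interval=0.0)
except VimFaultException as exc:
    print("Faults:", exc.fault_list)
```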
[ 683.558805] env[62813]: DEBUG nova.compute.claims [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 683.559021] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.559246] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.642492] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 683.698647] env[62813]: DEBUG oslo_vmware.rw_handles [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/973266c9-3b77-470d-b632-55bcc08a5390/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 683.759872] env[62813]: DEBUG oslo_vmware.rw_handles [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 683.760186] env[62813]: DEBUG oslo_vmware.rw_handles [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/973266c9-3b77-470d-b632-55bcc08a5390/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 684.092922] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16a78d9-a601-4c03-a53a-f226770193ac {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.100923] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc098d4-6ecd-45c2-9583-a28b7918d161 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.133393] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9acf228d-72d2-4c2a-9691-88cbe57b7a7c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.143922] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8159b29e-0ca6-4473-9931-9e88f03d6e78 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.161612] env[62813]: DEBUG nova.compute.provider_tree [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.170537] env[62813]: DEBUG nova.scheduler.client.report [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 684.185201] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.626s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.185749] env[62813]: ERROR nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 684.185749] env[62813]: Faults: ['InvalidArgument'] [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Traceback (most recent call last): [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 
022718e2-52ec-4130-81b0-fb39e57d6efe] self.driver.spawn(context, instance, image_meta, [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] self._fetch_image_if_missing(context, vi) [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] image_cache(vi, tmp_image_ds_loc) [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] vm_util.copy_virtual_disk( [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] session._wait_for_task(vmdk_copy_task) [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] return self.wait_for_task(task_ref) [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] return evt.wait() [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] result = hub.switch() [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] return self.greenlet.switch() [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] self.f(*self.args, **self.kw) [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] raise exceptions.translate_fault(task_info.error) [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Faults: ['InvalidArgument'] [ 684.185749] env[62813]: ERROR nova.compute.manager [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] [ 684.186564] env[62813]: DEBUG nova.compute.utils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 684.188070] env[62813]: DEBUG nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Build of instance 022718e2-52ec-4130-81b0-fb39e57d6efe was re-scheduled: A specified parameter was not correct: fileType [ 684.188070] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 684.188494] env[62813]: DEBUG nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 684.188675] env[62813]: DEBUG nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 684.188833] env[62813]: DEBUG nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 684.189037] env[62813]: DEBUG nova.network.neutron [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 684.620737] env[62813]: DEBUG nova.network.neutron [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.632940] env[62813]: INFO nova.compute.manager [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 022718e2-52ec-4130-81b0-fb39e57d6efe] Took 0.44 seconds to deallocate network for instance. [ 684.753247] env[62813]: INFO nova.scheduler.client.report [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Deleted allocations for instance 022718e2-52ec-4130-81b0-fb39e57d6efe [ 684.780596] env[62813]: DEBUG oslo_concurrency.lockutils [None req-da4474bf-1a35-4d0a-b510-3fd67f0832ce tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Lock "022718e2-52ec-4130-81b0-fb39e57d6efe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.090s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.791665] env[62813]: DEBUG nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 684.850458] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.850709] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.852200] env[62813]: INFO nova.compute.claims [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 685.327536] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa072b3-d767-41f0-b77c-cfd217f86e7c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.336646] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2acecc2c-0577-44ca-be86-8d443b0a798d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.371742] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82447eb1-55af-453c-83e9-3e7562e8c7c3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.380014] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac2c5df-b51f-41ea-be34-d9269bdcf523 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.394126] env[62813]: DEBUG nova.compute.provider_tree [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.403526] env[62813]: DEBUG nova.scheduler.client.report [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 685.419647] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.569s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.420195] env[62813]: DEBUG nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 685.458024] env[62813]: DEBUG nova.compute.utils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 685.459831] env[62813]: DEBUG nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 685.459831] env[62813]: DEBUG nova.network.neutron [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 685.468398] env[62813]: DEBUG nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 685.535731] env[62813]: DEBUG nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 685.562021] env[62813]: DEBUG nova.policy [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69a0566308564f989871e17774eff5f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '883f9938564e45a09c546c3e031279d0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 685.572033] env[62813]: DEBUG nova.virt.hardware [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 685.572359] env[62813]: DEBUG nova.virt.hardware [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 685.572441] env[62813]: DEBUG nova.virt.hardware [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 685.572621] env[62813]: DEBUG nova.virt.hardware [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 685.572836] env[62813]: DEBUG nova.virt.hardware [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 685.572994] env[62813]: DEBUG nova.virt.hardware [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 685.573246] env[62813]: DEBUG nova.virt.hardware [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 685.573442] env[62813]: DEBUG nova.virt.hardware [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 685.573614] env[62813]: DEBUG nova.virt.hardware [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 685.573790] env[62813]: DEBUG nova.virt.hardware [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 685.573961] env[62813]: DEBUG nova.virt.hardware [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 685.574863] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9aee8a-ccc6-452b-b943-3b6b499a9b7f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.584086] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e5ec26-b28c-4a8b-adb0-d0f96e760097 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.983917] env[62813]: DEBUG nova.network.neutron [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Successfully created port: f1a596ab-959b-448d-a6cf-2075c185d7ad {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 686.818537] env[62813]: DEBUG nova.network.neutron [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Successfully updated port: f1a596ab-959b-448d-a6cf-2075c185d7ad {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 686.832431] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Acquiring lock 
"refresh_cache-76b0e03d-9636-4328-bfd5-17c434cfae72" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.832571] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Acquired lock "refresh_cache-76b0e03d-9636-4328-bfd5-17c434cfae72" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.832717] env[62813]: DEBUG nova.network.neutron [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 686.891359] env[62813]: DEBUG nova.network.neutron [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 687.077827] env[62813]: DEBUG nova.compute.manager [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Received event network-vif-plugged-f1a596ab-959b-448d-a6cf-2075c185d7ad {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 687.078098] env[62813]: DEBUG oslo_concurrency.lockutils [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] Acquiring lock "76b0e03d-9636-4328-bfd5-17c434cfae72-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.078316] env[62813]: DEBUG oslo_concurrency.lockutils [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] Lock "76b0e03d-9636-4328-bfd5-17c434cfae72-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.078481] env[62813]: DEBUG oslo_concurrency.lockutils [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] Lock "76b0e03d-9636-4328-bfd5-17c434cfae72-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.078651] env[62813]: DEBUG nova.compute.manager [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] No waiting events found dispatching network-vif-plugged-f1a596ab-959b-448d-a6cf-2075c185d7ad {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 687.078882] env[62813]: WARNING nova.compute.manager [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Received unexpected event 
network-vif-plugged-f1a596ab-959b-448d-a6cf-2075c185d7ad for instance with vm_state building and task_state spawning. [ 687.078981] env[62813]: DEBUG nova.compute.manager [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Received event network-changed-f1a596ab-959b-448d-a6cf-2075c185d7ad {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 687.079189] env[62813]: DEBUG nova.compute.manager [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Refreshing instance network info cache due to event network-changed-f1a596ab-959b-448d-a6cf-2075c185d7ad. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 687.079362] env[62813]: DEBUG oslo_concurrency.lockutils [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] Acquiring lock "refresh_cache-76b0e03d-9636-4328-bfd5-17c434cfae72" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.133808] env[62813]: DEBUG nova.network.neutron [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Updating instance_info_cache with network_info: [{"id": "f1a596ab-959b-448d-a6cf-2075c185d7ad", "address": "fa:16:3e:be:7c:e2", "network": {"id": "2863e054-ccb7-49d3-b0ec-66823188ef26", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2030251734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "883f9938564e45a09c546c3e031279d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1a596ab-95", "ovs_interfaceid": "f1a596ab-959b-448d-a6cf-2075c185d7ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.148365] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Releasing lock "refresh_cache-76b0e03d-9636-4328-bfd5-17c434cfae72" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.148683] env[62813]: DEBUG nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Instance network_info: |[{"id": "f1a596ab-959b-448d-a6cf-2075c185d7ad", "address": 
"fa:16:3e:be:7c:e2", "network": {"id": "2863e054-ccb7-49d3-b0ec-66823188ef26", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2030251734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "883f9938564e45a09c546c3e031279d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1a596ab-95", "ovs_interfaceid": "f1a596ab-959b-448d-a6cf-2075c185d7ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 687.148982] env[62813]: DEBUG oslo_concurrency.lockutils [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] Acquired lock "refresh_cache-76b0e03d-9636-4328-bfd5-17c434cfae72" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.149208] env[62813]: DEBUG nova.network.neutron [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Refreshing network info cache for port f1a596ab-959b-448d-a6cf-2075c185d7ad {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 687.150316] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:7c:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f096917-a0cf-4add-a9d2-23ca1c723b3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1a596ab-959b-448d-a6cf-2075c185d7ad', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 687.157743] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Creating folder: Project (883f9938564e45a09c546c3e031279d0). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 687.161015] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f9b6051-3b80-4033-83d4-01d497b4be90 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.171808] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Created folder: Project (883f9938564e45a09c546c3e031279d0) in parent group-v840812. 
[ 687.172020] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Creating folder: Instances. Parent ref: group-v840848. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 687.172284] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e41fe1f-945d-42fe-862b-642e37ec16a1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.182350] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Created folder: Instances in parent group-v840848. [ 687.182705] env[62813]: DEBUG oslo.service.loopingcall [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 687.182830] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 687.183221] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-febeac2e-a39d-4881-9cdb-93b3fa093f52 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.205124] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 687.205124] env[62813]: value = "task-4267609" [ 687.205124] env[62813]: _type = "Task" [ 687.205124] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.218549] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267609, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.555018] env[62813]: DEBUG nova.network.neutron [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Updated VIF entry in instance network info cache for port f1a596ab-959b-448d-a6cf-2075c185d7ad. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 687.555451] env[62813]: DEBUG nova.network.neutron [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Updating instance_info_cache with network_info: [{"id": "f1a596ab-959b-448d-a6cf-2075c185d7ad", "address": "fa:16:3e:be:7c:e2", "network": {"id": "2863e054-ccb7-49d3-b0ec-66823188ef26", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2030251734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "883f9938564e45a09c546c3e031279d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1a596ab-95", "ovs_interfaceid": "f1a596ab-959b-448d-a6cf-2075c185d7ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.565698] env[62813]: DEBUG oslo_concurrency.lockutils [req-c7aefbc3-55e9-45f9-bf3f-9334cff14cbb req-4c1b0fe8-fa19-4d99-9af2-8756f2a0c9a9 service nova] Releasing lock "refresh_cache-76b0e03d-9636-4328-bfd5-17c434cfae72" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.715025] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267609, 'name': CreateVM_Task, 'duration_secs': 0.320566} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.715326] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 687.716173] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.716275] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.716663] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 687.717045] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6e6a18b-90f4-43a2-a3b6-f0a4e1289ada {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.722215] env[62813]: DEBUG oslo_vmware.api [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Waiting for the task: (returnval){ [ 687.722215] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52e0cb38-d907-bb0c-a3e4-c5a33bb63432" [ 687.722215] env[62813]: _type = "Task" [ 687.722215] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.730560] env[62813]: DEBUG oslo_vmware.api [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52e0cb38-d907-bb0c-a3e4-c5a33bb63432, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.998289] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fcde9f16-cd7a-4130-a398-399d73f669f4 tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Acquiring lock "8852d058-a494-47e4-977d-289b5126f7ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.998548] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fcde9f16-cd7a-4130-a398-399d73f669f4 tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Lock "8852d058-a494-47e4-977d-289b5126f7ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.233812] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.234151] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 688.234300] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.164065] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.164345] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.164501] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.164652] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 712.164582] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 712.164857] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 712.164890] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 712.187546] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 712.187719] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 712.187834] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 712.187962] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 712.188104] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 712.188227] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 712.188346] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 712.188464] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 712.188584] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 712.188702] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 712.188822] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 712.189333] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 712.189535] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 712.189703] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 712.201916] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.202196] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.202316] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.202471] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 712.203555] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f5dd9f-d96d-4129-b0e6-09e5e9deea6c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.213831] env[62813]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c74ef13-81d6-4d67-8842-e7e44b22b6e3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.230231] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ccb0fa-1276-4b2f-9de4-963f66f41491 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.237172] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aae8886-4a2e-434a-8928-e9afa3e04712 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.266355] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180740MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 712.266500] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.266699] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.338906] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c56debdf-ab77-4151-bc20-6973ae594d87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.339097] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4e911f12-5d9c-4713-b1e0-4d87a589a9d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.339233] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 65d620ad-0863-4947-945e-0e4b3c01d3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.339357] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.339479] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 012bbc43-f61f-4aef-bd66-32fbe66f8374 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.339600] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 09aa702f-a28c-429b-83d9-378be8606a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.339718] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 9a448d2b-0dee-4a90-b131-e6ada542f342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.339833] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance aa76585b-55a8-437c-8dea-7731d85a3b82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.339947] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.340108] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 76b0e03d-9636-4328-bfd5-17c434cfae72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.353705] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 356088a2-b55e-4ff1-9422-a53ab6830fc9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.364734] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 489b821e-f7d0-446f-8197-550c808e5a99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.375816] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 51817412-7e0a-48fe-8f8e-766eea45e60c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.387017] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 52329a92-b534-4811-b117-2041b125f4c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.398075] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b9e1cb26-df61-44aa-952f-e50e24766a6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.408173] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 338a113c-1d04-4243-8500-fcc6d458d3ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.418476] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 70a8650c-112c-44c2-b3cb-1b8cab557cac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.432460] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 3684a5c7-4998-4d5b-8cb6-7df3a9bf9270 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.449374] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b946bdda-a8a4-4a82-b2f7-99637fcae21c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.460236] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 704614e0-aaa3-48b6-8208-47af7ca0f367 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.473024] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b845396e-1641-4668-b687-348f1ee8b6f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.484026] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4e783b92-0668-4e70-9848-4b4320318603 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.493977] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a7f9eb88-e447-4f59-814e-6ebbb6a2ecb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.504675] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 51a8bec7-c8fd-4efb-ad91-eb34bb40a8fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.514920] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 097bae8e-614d-4322-b767-d56e0dc1b658 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.525045] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 22549912-2253-42bb-b2d7-8d0512c2a9d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.536347] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7cdd84be-1e0a-4e4a-9e40-b4d589f08914 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.548309] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f863dfb3-98d5-473b-9e41-85984e350070 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.557962] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 952194e3-6318-4ecc-8d48-bda3811c4d49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.568498] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5b935b76-027b-4b4a-a61b-3e4cc2f36c08 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.579577] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 8852d058-a494-47e4-977d-289b5126f7ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.579847] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 712.580032] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 712.956467] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721cd18f-55c9-4b33-a68a-76ee3b3bcd06 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.964885] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba62bede-61f2-4d8d-bfb4-b353ae3f55c8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.996093] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b8a5fb-2880-4c64-a6f1-e2f51cef208b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.004909] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c80e84-424c-456c-9254-7b2c4c64f314 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.019867] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.031272] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 713.046140] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 713.046926] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.780s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.020533] env[62813]: DEBUG oslo_service.periodic_task [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 714.020930] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.997767] env[62813]: WARNING oslo_vmware.rw_handles [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 731.997767] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 731.997767] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 731.997767] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 731.997767] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 731.997767] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 731.997767] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 731.997767] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 731.997767] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 731.997767] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 731.997767] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 731.997767] env[62813]: ERROR oslo_vmware.rw_handles [ 731.997767] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/973266c9-3b77-470d-b632-55bcc08a5390/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 731.999613] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 731.999907] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Copying Virtual Disk [datastore2] vmware_temp/973266c9-3b77-470d-b632-55bcc08a5390/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/973266c9-3b77-470d-b632-55bcc08a5390/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 732.000240] env[62813]: DEBUG oslo_vmware.service 
[-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ebe18e7-2144-421d-87c0-3c11b1c327d2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.008237] env[62813]: DEBUG oslo_vmware.api [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Waiting for the task: (returnval){ [ 732.008237] env[62813]: value = "task-4267610" [ 732.008237] env[62813]: _type = "Task" [ 732.008237] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.016368] env[62813]: DEBUG oslo_vmware.api [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Task: {'id': task-4267610, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.518894] env[62813]: DEBUG oslo_vmware.exceptions [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 732.519196] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.519916] env[62813]: ERROR nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 732.519916] env[62813]: Faults: ['InvalidArgument'] [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Traceback (most recent call last): [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] yield resources [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] self.driver.spawn(context, instance, image_meta, [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] self._vmops.spawn(context, instance, image_meta, injected_files, [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 
786, in spawn [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] self._fetch_image_if_missing(context, vi) [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] image_cache(vi, tmp_image_ds_loc) [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] vm_util.copy_virtual_disk( [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] session._wait_for_task(vmdk_copy_task) [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] return self.wait_for_task(task_ref) [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] return evt.wait() [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] result = hub.switch() [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] return self.greenlet.switch() [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] self.f(*self.args, **self.kw) [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] raise exceptions.translate_fault(task_info.error) [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Faults: ['InvalidArgument'] [ 732.519916] env[62813]: ERROR nova.compute.manager [instance: 
c56debdf-ab77-4151-bc20-6973ae594d87] [ 732.520840] env[62813]: INFO nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Terminating instance [ 732.522027] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.522114] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 732.522740] env[62813]: DEBUG nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 732.522927] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 732.523168] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59c8cdbf-8824-4f22-82b6-7c14281fe74c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.525565] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4da64ad-af2b-4bb5-ba8b-0cb2bf465ca7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.533148] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 732.533426] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85ca001d-9f4c-4279-b139-b41244ee6876 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.535769] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 732.535951] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Folder 
[datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 732.536972] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95e79a29-30d0-473d-b027-720ef5493108 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.543351] env[62813]: DEBUG oslo_vmware.api [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Waiting for the task: (returnval){ [ 732.543351] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]529fe443-c813-cbca-1b2a-72521a234dfb" [ 732.543351] env[62813]: _type = "Task" [ 732.543351] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.552430] env[62813]: DEBUG oslo_vmware.api [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]529fe443-c813-cbca-1b2a-72521a234dfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.608861] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 732.609121] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 732.609323] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Deleting the datastore file [datastore2] c56debdf-ab77-4151-bc20-6973ae594d87 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 732.609600] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc0e380a-c1f9-44b2-ad49-4b1eb7ac261f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.616224] env[62813]: DEBUG oslo_vmware.api [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Waiting for the task: (returnval){ [ 732.616224] env[62813]: value = "task-4267612" [ 732.616224] env[62813]: _type = "Task" [ 732.616224] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.624686] env[62813]: DEBUG oslo_vmware.api [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Task: {'id': task-4267612, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.053684] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 733.054469] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Creating directory with path [datastore2] vmware_temp/120f46eb-8ee2-42ad-9a03-540ba4112c18/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 733.054842] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-358ccfc8-a432-4316-83b2-3aec6fd31927 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.068458] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Created directory with path [datastore2] vmware_temp/120f46eb-8ee2-42ad-9a03-540ba4112c18/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 733.068458] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Fetch image to [datastore2] vmware_temp/120f46eb-8ee2-42ad-9a03-540ba4112c18/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 733.068458] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/120f46eb-8ee2-42ad-9a03-540ba4112c18/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 733.069044] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87572492-73ef-40f4-a624-0e8e41103435 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.076687] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acbbe893-e23d-4f28-b1a7-b258763aaad8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.086968] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3f504d-3438-4778-98d9-b44d725e74af {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.122932] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cb3a51ba-9f7d-44f1-95b4-ea8de5f1ce34 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.132359] env[62813]: DEBUG oslo_vmware.api [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Task: {'id': task-4267612, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078408} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.132893] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 733.133097] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 733.133277] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 733.133456] env[62813]: INFO nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 733.135627] env[62813]: DEBUG nova.compute.claims [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 733.135800] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.136027] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.139976] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5f846dc4-0f78-42cb-af77-badec769023e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.238464] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 733.306561] env[62813]: DEBUG oslo_vmware.rw_handles [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/120f46eb-8ee2-42ad-9a03-540ba4112c18/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 733.366321] env[62813]: DEBUG oslo_vmware.rw_handles [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 733.366723] env[62813]: DEBUG oslo_vmware.rw_handles [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/120f46eb-8ee2-42ad-9a03-540ba4112c18/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 733.642503] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f621ca1-73da-4724-af86-80ee3969ea42 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 733.651159] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcfeb2d-4c2e-403f-ac71-f3f8eaefe1b6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 733.682492] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4361ad7-5c3b-4f96-ae0b-ecd5a28aa3ad {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 733.690541] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fce8247-89e4-4563-8784-27e07c29cd93 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 733.706140] env[62813]: DEBUG nova.compute.provider_tree [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 733.718030] env[62813]: DEBUG nova.scheduler.client.report [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 733.736651] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.600s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 733.737188] env[62813]: ERROR nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 733.737188] env[62813]: Faults: ['InvalidArgument']
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Traceback (most recent call last):
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] self.driver.spawn(context, instance, image_meta,
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] self._fetch_image_if_missing(context, vi)
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] image_cache(vi, tmp_image_ds_loc)
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] vm_util.copy_virtual_disk(
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] session._wait_for_task(vmdk_copy_task)
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] return self.wait_for_task(task_ref)
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] return evt.wait()
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] result = hub.switch()
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] return self.greenlet.switch()
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] self.f(*self.args, **self.kw)
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] raise exceptions.translate_fault(task_info.error)
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Faults: ['InvalidArgument']
[ 733.737188] env[62813]: ERROR nova.compute.manager [instance: c56debdf-ab77-4151-bc20-6973ae594d87]
[ 733.738497] env[62813]: DEBUG nova.compute.utils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 733.739520] env[62813]: DEBUG nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Build of instance c56debdf-ab77-4151-bc20-6973ae594d87 was re-scheduled: A specified parameter was not correct: fileType
[ 733.739520] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 733.739905] env[62813]: DEBUG nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 733.740142] env[62813]: DEBUG nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged.
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 733.740344] env[62813]: DEBUG nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 733.740541] env[62813]: DEBUG nova.network.neutron [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 734.144035] env[62813]: DEBUG nova.network.neutron [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.156401] env[62813]: INFO nova.compute.manager [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] [instance: c56debdf-ab77-4151-bc20-6973ae594d87] Took 0.42 seconds to deallocate network for instance. [ 734.279322] env[62813]: INFO nova.scheduler.client.report [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Deleted allocations for instance c56debdf-ab77-4151-bc20-6973ae594d87 [ 734.302030] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8b89a3a1-37e3-411f-9b26-e98ef75ccc96 tempest-ServerExternalEventsTest-538456251 tempest-ServerExternalEventsTest-538456251-project-member] Lock "c56debdf-ab77-4151-bc20-6973ae594d87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 155.609s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.321027] env[62813]: DEBUG nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 734.400369] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.400650] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.402316] env[62813]: INFO nova.compute.claims [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 734.843262] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4004adc-4efa-4c66-9d74-719ab0138667 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.851843] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1c7121-2df2-4d96-ad79-3f2af1ec6849 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.883042] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78fa6e35-6049-4323-9158-600f29d5dc6a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.890918] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963876be-a5fa-4788-94c5-3ccf047a49d2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.904611] env[62813]: DEBUG nova.compute.provider_tree [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.913272] env[62813]: DEBUG nova.scheduler.client.report [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 734.932033] env[62813]: DEBUG 
oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.531s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.932177] env[62813]: DEBUG nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 734.967614] env[62813]: DEBUG nova.compute.utils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 734.971475] env[62813]: DEBUG nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 734.971475] env[62813]: DEBUG nova.network.neutron [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 734.979027] env[62813]: DEBUG nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 735.042138] env[62813]: DEBUG nova.policy [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '769b1244216d486ab6d0543106ab998b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9ae484ce9d3544c2bbb91fa78da89162', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 735.051806] env[62813]: DEBUG nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 735.079020] env[62813]: DEBUG nova.virt.hardware [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 735.079267] env[62813]: DEBUG nova.virt.hardware [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 735.079431] env[62813]: DEBUG nova.virt.hardware [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 735.079630] env[62813]: DEBUG nova.virt.hardware [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 735.079774] env[62813]: DEBUG nova.virt.hardware [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 735.079921] env[62813]: DEBUG nova.virt.hardware [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 735.080149] env[62813]: DEBUG nova.virt.hardware [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 735.080313] env[62813]: DEBUG nova.virt.hardware [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 735.080487] env[62813]: DEBUG nova.virt.hardware [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 735.080651] env[62813]: DEBUG nova.virt.hardware [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 735.080826] env[62813]: DEBUG nova.virt.hardware [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 735.081723] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca1087c-b63b-4e56-a9ca-6e9bdc7a1988 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.090157] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15440096-742d-4109-983a-993755e38124 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.852403] env[62813]: DEBUG nova.network.neutron [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Successfully created port: 06245b26-2805-4bd6-9152-d3befc0d867a {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.960554] env[62813]: DEBUG nova.network.neutron [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Successfully updated port: 06245b26-2805-4bd6-9152-d3befc0d867a {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 736.982968] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Acquiring lock "refresh_cache-356088a2-b55e-4ff1-9422-a53ab6830fc9" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.983213] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Acquired lock "refresh_cache-356088a2-b55e-4ff1-9422-a53ab6830fc9" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.983409] env[62813]: DEBUG nova.network.neutron [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Building network info cache for instance {{(pid=62813) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 737.047950] env[62813]: DEBUG nova.network.neutron [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 737.067619] env[62813]: DEBUG nova.compute.manager [req-e7a73049-4165-4d22-89cb-c5efc2513b2e req-a85fed7c-aa86-4f46-aae8-fa1c68e21b76 service nova] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Received event network-vif-plugged-06245b26-2805-4bd6-9152-d3befc0d867a {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 737.068753] env[62813]: DEBUG oslo_concurrency.lockutils [req-e7a73049-4165-4d22-89cb-c5efc2513b2e req-a85fed7c-aa86-4f46-aae8-fa1c68e21b76 service nova] Acquiring lock "356088a2-b55e-4ff1-9422-a53ab6830fc9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.068981] env[62813]: DEBUG oslo_concurrency.lockutils [req-e7a73049-4165-4d22-89cb-c5efc2513b2e req-a85fed7c-aa86-4f46-aae8-fa1c68e21b76 service nova] Lock "356088a2-b55e-4ff1-9422-a53ab6830fc9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.069180] env[62813]: DEBUG oslo_concurrency.lockutils [req-e7a73049-4165-4d22-89cb-c5efc2513b2e req-a85fed7c-aa86-4f46-aae8-fa1c68e21b76 service nova] Lock "356088a2-b55e-4ff1-9422-a53ab6830fc9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.069361] env[62813]: DEBUG nova.compute.manager [req-e7a73049-4165-4d22-89cb-c5efc2513b2e req-a85fed7c-aa86-4f46-aae8-fa1c68e21b76 service nova] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] No waiting events found dispatching network-vif-plugged-06245b26-2805-4bd6-9152-d3befc0d867a {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 737.069529] env[62813]: WARNING nova.compute.manager [req-e7a73049-4165-4d22-89cb-c5efc2513b2e req-a85fed7c-aa86-4f46-aae8-fa1c68e21b76 service nova] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Received unexpected event network-vif-plugged-06245b26-2805-4bd6-9152-d3befc0d867a for instance with vm_state building and task_state spawning. 
[ 737.315152] env[62813]: DEBUG nova.network.neutron [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Updating instance_info_cache with network_info: [{"id": "06245b26-2805-4bd6-9152-d3befc0d867a", "address": "fa:16:3e:b1:eb:35", "network": {"id": "ce98ef2f-75cb-46a3-ae88-b7ad4ab9d400", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1116204265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ae484ce9d3544c2bbb91fa78da89162", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0234c8-1a2d-47ff-9a72-2e7d35b49214", "external-id": "nsx-vlan-transportzone-788", "segmentation_id": 788, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06245b26-28", "ovs_interfaceid": "06245b26-2805-4bd6-9152-d3befc0d867a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.332816] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Releasing lock "refresh_cache-356088a2-b55e-4ff1-9422-a53ab6830fc9" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.333141] env[62813]: DEBUG nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Instance network_info: |[{"id": "06245b26-2805-4bd6-9152-d3befc0d867a", "address": "fa:16:3e:b1:eb:35", "network": {"id": "ce98ef2f-75cb-46a3-ae88-b7ad4ab9d400", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1116204265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ae484ce9d3544c2bbb91fa78da89162", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0234c8-1a2d-47ff-9a72-2e7d35b49214", "external-id": "nsx-vlan-transportzone-788", "segmentation_id": 788, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06245b26-28", "ovs_interfaceid": "06245b26-2805-4bd6-9152-d3befc0d867a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 737.333566] 
env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:eb:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'da0234c8-1a2d-47ff-9a72-2e7d35b49214', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06245b26-2805-4bd6-9152-d3befc0d867a', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 737.341272] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Creating folder: Project (9ae484ce9d3544c2bbb91fa78da89162). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 737.341870] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e605c11a-f335-4838-814f-99705c2db536 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.353035] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Created folder: Project (9ae484ce9d3544c2bbb91fa78da89162) in parent group-v840812. [ 737.353233] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Creating folder: Instances. Parent ref: group-v840851. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 737.353480] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc2abe5b-5a39-4666-afac-10152f2c932c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.364813] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Created folder: Instances in parent group-v840851. [ 737.365062] env[62813]: DEBUG oslo.service.loopingcall [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 737.365255] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 737.365469] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3597cc10-193a-4f88-8003-b882947d3dff {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.388505] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 737.388505] env[62813]: value = "task-4267615" [ 737.388505] env[62813]: _type = "Task" [ 737.388505] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.398956] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267615, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.899982] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267615, 'name': CreateVM_Task, 'duration_secs': 0.317025} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.900178] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 737.900854] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.901041] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.901358] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 737.901614] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95a28aa2-8805-4ebd-9d77-2023d2615e53 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.906717] env[62813]: DEBUG oslo_vmware.api [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Waiting for the task: (returnval){ [ 737.906717] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52aedd97-fc05-91fe-86fa-682f22840f8f" [ 737.906717] env[62813]: _type = "Task" [ 737.906717] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.915021] env[62813]: DEBUG oslo_vmware.api [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52aedd97-fc05-91fe-86fa-682f22840f8f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.417773] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.418238] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 738.418238] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.339092] env[62813]: DEBUG nova.compute.manager [req-faa04a0d-37cb-45a5-8fc0-b02b5e3b4190 req-02cfa528-5b1c-44d7-93c0-07c9c90e3fb4 service nova] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Received event network-changed-06245b26-2805-4bd6-9152-d3befc0d867a {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 739.339309] env[62813]: DEBUG nova.compute.manager [req-faa04a0d-37cb-45a5-8fc0-b02b5e3b4190 req-02cfa528-5b1c-44d7-93c0-07c9c90e3fb4 service nova] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Refreshing instance network info cache due to event network-changed-06245b26-2805-4bd6-9152-d3befc0d867a. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 739.339567] env[62813]: DEBUG oslo_concurrency.lockutils [req-faa04a0d-37cb-45a5-8fc0-b02b5e3b4190 req-02cfa528-5b1c-44d7-93c0-07c9c90e3fb4 service nova] Acquiring lock "refresh_cache-356088a2-b55e-4ff1-9422-a53ab6830fc9" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.339722] env[62813]: DEBUG oslo_concurrency.lockutils [req-faa04a0d-37cb-45a5-8fc0-b02b5e3b4190 req-02cfa528-5b1c-44d7-93c0-07c9c90e3fb4 service nova] Acquired lock "refresh_cache-356088a2-b55e-4ff1-9422-a53ab6830fc9" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.339888] env[62813]: DEBUG nova.network.neutron [req-faa04a0d-37cb-45a5-8fc0-b02b5e3b4190 req-02cfa528-5b1c-44d7-93c0-07c9c90e3fb4 service nova] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Refreshing network info cache for port 06245b26-2805-4bd6-9152-d3befc0d867a {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 739.917187] env[62813]: DEBUG nova.network.neutron [req-faa04a0d-37cb-45a5-8fc0-b02b5e3b4190 req-02cfa528-5b1c-44d7-93c0-07c9c90e3fb4 service nova] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Updated VIF entry in instance network info cache for port 06245b26-2805-4bd6-9152-d3befc0d867a. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 739.918484] env[62813]: DEBUG nova.network.neutron [req-faa04a0d-37cb-45a5-8fc0-b02b5e3b4190 req-02cfa528-5b1c-44d7-93c0-07c9c90e3fb4 service nova] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Updating instance_info_cache with network_info: [{"id": "06245b26-2805-4bd6-9152-d3befc0d867a", "address": "fa:16:3e:b1:eb:35", "network": {"id": "ce98ef2f-75cb-46a3-ae88-b7ad4ab9d400", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1116204265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ae484ce9d3544c2bbb91fa78da89162", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0234c8-1a2d-47ff-9a72-2e7d35b49214", "external-id": "nsx-vlan-transportzone-788", "segmentation_id": 788, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06245b26-28", "ovs_interfaceid": "06245b26-2805-4bd6-9152-d3befc0d867a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.939039] env[62813]: DEBUG oslo_concurrency.lockutils [req-faa04a0d-37cb-45a5-8fc0-b02b5e3b4190 req-02cfa528-5b1c-44d7-93c0-07c9c90e3fb4 service nova] Releasing lock "refresh_cache-356088a2-b55e-4ff1-9422-a53ab6830fc9" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.684379] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.684679] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.164843] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 770.165101] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 770.165382] env[62813]: DEBUG oslo_service.periodic_task [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 770.165483] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 773.164316] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 774.159591] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 774.186798] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 774.187151] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 774.187151] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 774.208739] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 774.208908] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 774.209055] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 774.209189] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 774.209313] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 774.209434] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 774.209556] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 774.209674] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 774.209859] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 774.209906] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 774.210031] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 774.210518] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 774.210697] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 774.210867] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 774.223442] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.223661] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.223827] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.223986] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 774.225086] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c1ddec-26d4-40b2-9297-9d6d1615d066 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.235678] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190d6c80-94ec-4074-b776-8b17eed34abe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.250440] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19490f66-e1bc-4736-8737-5c8bab2f1844 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.257905] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6547063c-cee4-4dab-9864-fa35eeb118bc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.296669] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180778MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 774.296930] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.297283] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.395762] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4e911f12-5d9c-4713-b1e0-4d87a589a9d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 774.395945] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 65d620ad-0863-4947-945e-0e4b3c01d3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 774.396127] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 774.396223] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 012bbc43-f61f-4aef-bd66-32fbe66f8374 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 774.396345] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 09aa702f-a28c-429b-83d9-378be8606a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 774.396465] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 9a448d2b-0dee-4a90-b131-e6ada542f342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 774.396585] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance aa76585b-55a8-437c-8dea-7731d85a3b82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 774.396706] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 774.396825] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 76b0e03d-9636-4328-bfd5-17c434cfae72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 774.396941] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 356088a2-b55e-4ff1-9422-a53ab6830fc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 774.408836] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 489b821e-f7d0-446f-8197-550c808e5a99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.419643] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 51817412-7e0a-48fe-8f8e-766eea45e60c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.429876] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 52329a92-b534-4811-b117-2041b125f4c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.440011] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b9e1cb26-df61-44aa-952f-e50e24766a6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.450667] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 338a113c-1d04-4243-8500-fcc6d458d3ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.461327] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 70a8650c-112c-44c2-b3cb-1b8cab557cac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.472160] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 3684a5c7-4998-4d5b-8cb6-7df3a9bf9270 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.482794] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b946bdda-a8a4-4a82-b2f7-99637fcae21c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.495353] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 704614e0-aaa3-48b6-8208-47af7ca0f367 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.506308] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b845396e-1641-4668-b687-348f1ee8b6f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.518248] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4e783b92-0668-4e70-9848-4b4320318603 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.529492] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a7f9eb88-e447-4f59-814e-6ebbb6a2ecb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.540504] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 51a8bec7-c8fd-4efb-ad91-eb34bb40a8fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.550573] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 097bae8e-614d-4322-b767-d56e0dc1b658 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.562700] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 22549912-2253-42bb-b2d7-8d0512c2a9d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.574266] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7cdd84be-1e0a-4e4a-9e40-b4d589f08914 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.584554] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f863dfb3-98d5-473b-9e41-85984e350070 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.594740] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 952194e3-6318-4ecc-8d48-bda3811c4d49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.605182] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5b935b76-027b-4b4a-a61b-3e4cc2f36c08 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.616928] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 8852d058-a494-47e4-977d-289b5126f7ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.628036] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.628300] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 774.628453] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 775.047560] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7006c7d7-1256-4a87-a348-8198aaba99de {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.058387] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5525aa-dad1-4872-990b-82049931718f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.104409] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c63211-bdd9-441a-87da-5a532874caa6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.114787] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a95b11-4a26-44c9-b350-294aa43c8027 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.136020] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None 
None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.146669] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 775.164024] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 775.164024] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.867s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.164141] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.557858] env[62813]: WARNING oslo_vmware.rw_handles [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 781.557858] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 781.557858] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 781.557858] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 781.557858] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 781.557858] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 781.557858] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 781.557858] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 781.557858] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 781.557858] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 781.557858] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 781.557858] env[62813]: ERROR oslo_vmware.rw_handles [ 781.558485] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 
4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/120f46eb-8ee2-42ad-9a03-540ba4112c18/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 781.560180] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 781.560338] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Copying Virtual Disk [datastore2] vmware_temp/120f46eb-8ee2-42ad-9a03-540ba4112c18/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/120f46eb-8ee2-42ad-9a03-540ba4112c18/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 781.560628] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e56ccec-08ed-4619-82a1-824abacd32f2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.569598] env[62813]: DEBUG oslo_vmware.api [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Waiting for the task: (returnval){ [ 781.569598] env[62813]: value = "task-4267616" [ 781.569598] env[62813]: _type = "Task" [ 781.569598] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.577842] env[62813]: DEBUG oslo_vmware.api [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Task: {'id': task-4267616, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.080566] env[62813]: DEBUG oslo_vmware.exceptions [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 782.081689] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.081689] env[62813]: ERROR nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 782.081689] env[62813]: Faults: ['InvalidArgument'] [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Traceback (most recent call last): [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] yield resources [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] self.driver.spawn(context, instance, image_meta, [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] self._fetch_image_if_missing(context, vi) [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] image_cache(vi, tmp_image_ds_loc) [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] vm_util.copy_virtual_disk( [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] session._wait_for_task(vmdk_copy_task) [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] return self.wait_for_task(task_ref) [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] return evt.wait() [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] result = hub.switch() [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] return self.greenlet.switch() [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] self.f(*self.args, **self.kw) [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] raise exceptions.translate_fault(task_info.error) [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Faults: ['InvalidArgument'] [ 782.081689] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] [ 782.081689] env[62813]: INFO nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Terminating instance [ 782.083380] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.083597] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 782.083841] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64d11ecf-a10b-4c84-b8b3-89cd0b355b46 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.087314] env[62813]: DEBUG nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 782.087533] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 782.088341] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724b53db-ad8c-4316-bf90-fe1ff53a5224 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.092494] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 782.092689] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 782.093428] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-470e9fca-9729-4cb8-ad6d-5a2c27625151 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.097694] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 782.098285] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59772940-29e9-43e5-913b-633491fa7dfa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.101195] env[62813]: DEBUG oslo_vmware.api [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Waiting for the task: (returnval){ [ 782.101195] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5226a7dd-5c59-1655-197e-6ddd78eb5089" [ 782.101195] env[62813]: _type = "Task" [ 782.101195] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.109484] env[62813]: DEBUG oslo_vmware.api [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5226a7dd-5c59-1655-197e-6ddd78eb5089, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.178948] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 782.179227] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 782.179370] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Deleting the datastore file [datastore2] 4e911f12-5d9c-4713-b1e0-4d87a589a9d8 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 782.179648] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3166c056-4a38-4a05-a705-335c13b77d21 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.186158] env[62813]: DEBUG oslo_vmware.api [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Waiting for the task: (returnval){ [ 782.186158] env[62813]: value = "task-4267618" [ 782.186158] env[62813]: _type = "Task" [ 782.186158] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.195062] env[62813]: DEBUG oslo_vmware.api [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Task: {'id': task-4267618, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.612882] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 782.613241] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Creating directory with path [datastore2] vmware_temp/ccb431f3-9dec-459c-885c-bb0c3f86dab2/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 782.613523] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c339d5d7-0137-4027-b419-f5dbedea8385 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.626351] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Created directory with path [datastore2] vmware_temp/ccb431f3-9dec-459c-885c-bb0c3f86dab2/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 782.626555] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Fetch image to [datastore2] vmware_temp/ccb431f3-9dec-459c-885c-bb0c3f86dab2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 782.626727] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/ccb431f3-9dec-459c-885c-bb0c3f86dab2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 782.627515] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-524b459e-85be-4a44-b43d-de2959bb63e3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.635284] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57504c0e-82ce-4981-b1a3-17aa46d93380 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.645531] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03870ec-38c9-4ba1-abe8-f442df1729f1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.679284] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a5bd63e6-99a0-4aa0-b270-0410e3737e3c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.686258] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7b9b5d11-55c3-49c8-b1f3-32d1d458c405 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.696987] env[62813]: DEBUG oslo_vmware.api [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Task: {'id': task-4267618, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0695} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.697442] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 782.697554] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 782.697714] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 782.697950] env[62813]: INFO nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Took 0.61 seconds to destroy the instance on the hypervisor. 
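The records from 781.557858 through 782.697950 trace a single failed spawn of instance 4e911f12-5d9c-4713-b1e0-4d87a589a9d8: the image write handle is closed after the remote end drops the HTTP connection, the follow-up CopyVirtualDisk_Task (task-4267616) fails with "A specified parameter was not correct: fileType" (InvalidArgument), and the compute manager then terminates the instance, unregisters the VM and deletes its datastore directory. The fault reaches the compute manager through the poll-and-raise pattern visible in the traceback above; the sketch below only mirrors the shape of that loop, with purely illustrative names (TaskInfo, poll_task_info, VimFault) that are not oslo.vmware's real API.

# Hypothetical sketch of the poll-and-raise pattern visible in the traceback
# above; names (TaskInfo, poll_task_info, VimFault) are illustrative only and
# are not oslo.vmware's actual classes or signatures.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str                 # 'running' | 'success' | 'error'
    error_message: str = ""
    faults: tuple = ()


class VimFault(Exception):
    """Carries the fault names alongside the error message."""
    def __init__(self, message, fault_list):
        super().__init__(message)
        self.fault_list = fault_list


def wait_for_task(poll_task_info, interval=0.5):
    """Poll a vSphere-style task until it finishes, raising on error.

    This mirrors how the CopyVirtualDisk_Task failure in the log surfaces
    as a fault-carrying exception that the spawn path does not catch.
    """
    while True:
        info = poll_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # The compute manager logs this as "Instance failed to spawn",
            # destroys the instance and aborts its resource claim.
            raise VimFault(info.error_message, list(info.faults))
        time.sleep(interval)

Fed a stub whose poll function eventually returns an 'error' state with faults=('InvalidArgument',), the helper raises an analogous fault-carrying exception to the VimFaultException re-raised at api.py line 448 in the traceback.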
[ 782.700387] env[62813]: DEBUG nova.compute.claims [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 782.700648] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.700975] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.714533] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 782.776439] env[62813]: DEBUG oslo_vmware.rw_handles [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ccb431f3-9dec-459c-885c-bb0c3f86dab2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 782.836225] env[62813]: DEBUG oslo_vmware.rw_handles [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 782.836470] env[62813]: DEBUG oslo_vmware.rw_handles [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ccb431f3-9dec-459c-885c-bb0c3f86dab2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 783.204442] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb36cd9-3517-471e-b889-1584e0a3e51f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.214109] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e182742e-8a6c-4381-a92e-0237a0e7be23 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.247625] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7414412a-e385-4afb-9ac7-32887011640d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.256314] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f66a467-b73d-478b-a903-a7b19a8220ff {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.272022] env[62813]: DEBUG nova.compute.provider_tree [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.281664] env[62813]: DEBUG nova.scheduler.client.report [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 783.298928] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.598s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.299514] env[62813]: ERROR nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 783.299514] env[62813]: Faults: ['InvalidArgument'] [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Traceback (most recent call last): [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 783.299514] env[62813]: ERROR 
nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] self.driver.spawn(context, instance, image_meta, [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] self._fetch_image_if_missing(context, vi) [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] image_cache(vi, tmp_image_ds_loc) [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] vm_util.copy_virtual_disk( [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] session._wait_for_task(vmdk_copy_task) [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] return self.wait_for_task(task_ref) [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] return evt.wait() [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] result = hub.switch() [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] return self.greenlet.switch() [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] self.f(*self.args, **self.kw) [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] raise exceptions.translate_fault(task_info.error) [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Faults: ['InvalidArgument'] [ 783.299514] env[62813]: ERROR nova.compute.manager [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] [ 783.300430] env[62813]: DEBUG nova.compute.utils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 783.303684] env[62813]: DEBUG nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Build of instance 4e911f12-5d9c-4713-b1e0-4d87a589a9d8 was re-scheduled: A specified parameter was not correct: fileType [ 783.303684] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 783.304097] env[62813]: DEBUG nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 783.304281] env[62813]: DEBUG nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 783.304455] env[62813]: DEBUG nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 783.304619] env[62813]: DEBUG nova.network.neutron [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 783.656263] env[62813]: DEBUG nova.network.neutron [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.671202] env[62813]: INFO nova.compute.manager [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] [instance: 4e911f12-5d9c-4713-b1e0-4d87a589a9d8] Took 0.36 seconds to deallocate network for instance. [ 783.779402] env[62813]: INFO nova.scheduler.client.report [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Deleted allocations for instance 4e911f12-5d9c-4713-b1e0-4d87a589a9d8 [ 783.802375] env[62813]: DEBUG oslo_concurrency.lockutils [None req-657d982d-27a4-4023-a12a-10bba4584243 tempest-ImagesOneServerTestJSON-1434932989 tempest-ImagesOneServerTestJSON-1434932989-project-member] Lock "4e911f12-5d9c-4713-b1e0-4d87a589a9d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.872s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.817823] env[62813]: DEBUG nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 783.878900] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.879543] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.881258] env[62813]: INFO nova.compute.claims [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 784.320124] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18e5ee0-dbd8-4546-a42d-780a96937407 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.328441] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fefcb908-1143-4f2d-86ea-7c1a6a4b9a33 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.361238] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8129aad-acc7-428e-bd54-b90467bf7316 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.369193] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10354957-d69e-43e7-9813-00d96fd25c13 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.382846] env[62813]: DEBUG nova.compute.provider_tree [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.392115] env[62813]: DEBUG nova.scheduler.client.report [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 784.406214] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 
tempest-ServersTestJSON-54998654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.527s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.406726] env[62813]: DEBUG nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 784.448630] env[62813]: DEBUG nova.compute.utils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 784.451449] env[62813]: DEBUG nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 784.451449] env[62813]: DEBUG nova.network.neutron [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 784.459678] env[62813]: DEBUG nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 784.541107] env[62813]: DEBUG nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 784.544853] env[62813]: DEBUG nova.policy [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '271b1ca4133444d6a5e3cc45933cd269', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d18909325d74960ab3865346549348b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 784.575011] env[62813]: DEBUG nova.virt.hardware [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 784.575373] env[62813]: DEBUG nova.virt.hardware [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 784.575548] env[62813]: DEBUG nova.virt.hardware [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.576376] env[62813]: DEBUG nova.virt.hardware [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 784.576616] env[62813]: DEBUG nova.virt.hardware [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 784.576816] env[62813]: DEBUG nova.virt.hardware [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 784.577110] env[62813]: DEBUG nova.virt.hardware [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 
tempest-ServersTestJSON-54998654-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 784.577338] env[62813]: DEBUG nova.virt.hardware [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 784.577683] env[62813]: DEBUG nova.virt.hardware [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 784.577973] env[62813]: DEBUG nova.virt.hardware [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 784.578187] env[62813]: DEBUG nova.virt.hardware [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 784.579109] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b1adac-76ac-4456-bc90-4e1ab1fe2044 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.588995] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5421c380-65c6-444d-9d86-9795b2612de5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.022033] env[62813]: DEBUG nova.network.neutron [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Successfully created port: 5b59fdbb-59c8-4f01-82ae-d785f0a3d187 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.349487] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f761a8a2-5868-4f15-889f-6c65de75e2c6 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "65d620ad-0863-4947-945e-0e4b3c01d3a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.871533] env[62813]: DEBUG nova.network.neutron [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Successfully updated port: 5b59fdbb-59c8-4f01-82ae-d785f0a3d187 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 785.882801] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Acquiring lock "refresh_cache-489b821e-f7d0-446f-8197-550c808e5a99" {{(pid=62813) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.882973] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Acquired lock "refresh_cache-489b821e-f7d0-446f-8197-550c808e5a99" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.883184] env[62813]: DEBUG nova.network.neutron [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 785.956107] env[62813]: DEBUG nova.network.neutron [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 786.445497] env[62813]: DEBUG nova.network.neutron [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Updating instance_info_cache with network_info: [{"id": "5b59fdbb-59c8-4f01-82ae-d785f0a3d187", "address": "fa:16:3e:cb:b0:7c", "network": {"id": "5af48442-727e-4f96-a5c0-030e4ef040e6", "bridge": "br-int", "label": "tempest-ServersTestJSON-114279647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d18909325d74960ab3865346549348b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b59fdbb-59", "ovs_interfaceid": "5b59fdbb-59c8-4f01-82ae-d785f0a3d187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.459945] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Releasing lock "refresh_cache-489b821e-f7d0-446f-8197-550c808e5a99" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.460338] env[62813]: DEBUG nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Instance network_info: |[{"id": "5b59fdbb-59c8-4f01-82ae-d785f0a3d187", "address": "fa:16:3e:cb:b0:7c", "network": {"id": "5af48442-727e-4f96-a5c0-030e4ef040e6", "bridge": "br-int", "label": "tempest-ServersTestJSON-114279647-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d18909325d74960ab3865346549348b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b59fdbb-59", "ovs_interfaceid": "5b59fdbb-59c8-4f01-82ae-d785f0a3d187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 786.460758] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:b0:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b59fdbb-59c8-4f01-82ae-d785f0a3d187', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 786.468586] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Creating folder: Project (2d18909325d74960ab3865346549348b). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 786.470294] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03828e4a-83b1-45ea-a979-b3a8d009d8f0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.486495] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Created folder: Project (2d18909325d74960ab3865346549348b) in parent group-v840812. [ 786.486495] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Creating folder: Instances. Parent ref: group-v840854. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 786.486495] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fda70638-6750-4b9f-af4d-8b8104552ec2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.498033] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Created folder: Instances in parent group-v840854. 
[ 786.498033] env[62813]: DEBUG oslo.service.loopingcall [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 786.498033] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 786.499769] env[62813]: DEBUG nova.compute.manager [req-af688de9-32a2-49f1-b405-46e605c3efdc req-b6e8005a-9337-42d9-a021-549dd5656f71 service nova] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Received event network-vif-plugged-5b59fdbb-59c8-4f01-82ae-d785f0a3d187 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 786.500313] env[62813]: DEBUG oslo_concurrency.lockutils [req-af688de9-32a2-49f1-b405-46e605c3efdc req-b6e8005a-9337-42d9-a021-549dd5656f71 service nova] Acquiring lock "489b821e-f7d0-446f-8197-550c808e5a99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.500754] env[62813]: DEBUG oslo_concurrency.lockutils [req-af688de9-32a2-49f1-b405-46e605c3efdc req-b6e8005a-9337-42d9-a021-549dd5656f71 service nova] Lock "489b821e-f7d0-446f-8197-550c808e5a99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.501075] env[62813]: DEBUG oslo_concurrency.lockutils [req-af688de9-32a2-49f1-b405-46e605c3efdc req-b6e8005a-9337-42d9-a021-549dd5656f71 service nova] Lock "489b821e-f7d0-446f-8197-550c808e5a99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.503031] env[62813]: DEBUG nova.compute.manager [req-af688de9-32a2-49f1-b405-46e605c3efdc req-b6e8005a-9337-42d9-a021-549dd5656f71 service nova] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] No waiting events found dispatching network-vif-plugged-5b59fdbb-59c8-4f01-82ae-d785f0a3d187 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 786.503031] env[62813]: WARNING nova.compute.manager [req-af688de9-32a2-49f1-b405-46e605c3efdc req-b6e8005a-9337-42d9-a021-549dd5656f71 service nova] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Received unexpected event network-vif-plugged-5b59fdbb-59c8-4f01-82ae-d785f0a3d187 for instance with vm_state building and task_state spawning. [ 786.503031] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ae42e4b-2fc7-42bf-bf7d-fcb1c37e9ee2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.522996] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 786.522996] env[62813]: value = "task-4267621" [ 786.522996] env[62813]: _type = "Task" [ 786.522996] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.534243] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267621, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.035726] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267621, 'name': CreateVM_Task, 'duration_secs': 0.330902} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.036219] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 787.037434] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.037759] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.038588] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 787.038992] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6679bf2e-effd-444a-9af4-249aaa8910e6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.045186] env[62813]: DEBUG oslo_vmware.api [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Waiting for the task: (returnval){ [ 787.045186] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52ad8f0a-edf8-310f-733d-66b3c6c1eeb6" [ 787.045186] env[62813]: _type = "Task" [ 787.045186] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.054712] env[62813]: DEBUG oslo_vmware.api [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52ad8f0a-edf8-310f-733d-66b3c6c1eeb6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.559212] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.559618] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 787.559618] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.877428] env[62813]: DEBUG nova.compute.manager [req-36d250c5-b177-4d91-81b6-47022eb61ed8 req-76afe37b-77c8-43e3-9df4-09bbd1d041db service nova] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Received event network-changed-5b59fdbb-59c8-4f01-82ae-d785f0a3d187 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 788.877680] env[62813]: DEBUG nova.compute.manager [req-36d250c5-b177-4d91-81b6-47022eb61ed8 req-76afe37b-77c8-43e3-9df4-09bbd1d041db service nova] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Refreshing instance network info cache due to event network-changed-5b59fdbb-59c8-4f01-82ae-d785f0a3d187. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 788.877899] env[62813]: DEBUG oslo_concurrency.lockutils [req-36d250c5-b177-4d91-81b6-47022eb61ed8 req-76afe37b-77c8-43e3-9df4-09bbd1d041db service nova] Acquiring lock "refresh_cache-489b821e-f7d0-446f-8197-550c808e5a99" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.878079] env[62813]: DEBUG oslo_concurrency.lockutils [req-36d250c5-b177-4d91-81b6-47022eb61ed8 req-76afe37b-77c8-43e3-9df4-09bbd1d041db service nova] Acquired lock "refresh_cache-489b821e-f7d0-446f-8197-550c808e5a99" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.878303] env[62813]: DEBUG nova.network.neutron [req-36d250c5-b177-4d91-81b6-47022eb61ed8 req-76afe37b-77c8-43e3-9df4-09bbd1d041db service nova] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Refreshing network info cache for port 5b59fdbb-59c8-4f01-82ae-d785f0a3d187 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 789.416400] env[62813]: DEBUG nova.network.neutron [req-36d250c5-b177-4d91-81b6-47022eb61ed8 req-76afe37b-77c8-43e3-9df4-09bbd1d041db service nova] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Updated VIF entry in instance network info cache for port 5b59fdbb-59c8-4f01-82ae-d785f0a3d187. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 789.416774] env[62813]: DEBUG nova.network.neutron [req-36d250c5-b177-4d91-81b6-47022eb61ed8 req-76afe37b-77c8-43e3-9df4-09bbd1d041db service nova] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Updating instance_info_cache with network_info: [{"id": "5b59fdbb-59c8-4f01-82ae-d785f0a3d187", "address": "fa:16:3e:cb:b0:7c", "network": {"id": "5af48442-727e-4f96-a5c0-030e4ef040e6", "bridge": "br-int", "label": "tempest-ServersTestJSON-114279647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d18909325d74960ab3865346549348b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b59fdbb-59", "ovs_interfaceid": "5b59fdbb-59c8-4f01-82ae-d785f0a3d187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.427142] env[62813]: DEBUG oslo_concurrency.lockutils [req-36d250c5-b177-4d91-81b6-47022eb61ed8 req-76afe37b-77c8-43e3-9df4-09bbd1d041db service nova] Releasing lock "refresh_cache-489b821e-f7d0-446f-8197-550c808e5a99" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.677583] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ca9f9e2a-1f76-4b9d-909b-6952261066ca tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Acquiring lock "7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.847259] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Acquiring lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.847594] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.998028] env[62813]: DEBUG oslo_concurrency.lockutils [None req-cf01784f-8e46-4034-937d-91eac91fde33 tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] 
Acquiring lock "012bbc43-f61f-4aef-bd66-32fbe66f8374" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.468182] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2b17c98a-e168-49b0-beb7-b0c7162b47f4 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Acquiring lock "09aa702f-a28c-429b-83d9-378be8606a29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.642673] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5baa827c-9540-44ed-a6b3-ee557fe3d672 tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Acquiring lock "4357ed8f-14f7-4d44-8d1b-262ccaafe1e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.048591] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c8236a88-ea4b-4142-be9d-ddb4eb1f9fbc tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "9a448d2b-0dee-4a90-b131-e6ada542f342" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.130497] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd1dd117-42c0-4704-9a70-c2e2ff9c81b4 tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "aa76585b-55a8-437c-8dea-7731d85a3b82" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.048607] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d47da77-97ec-443b-a8c3-88ee26c955f9 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Acquiring lock "76b0e03d-9636-4328-bfd5-17c434cfae72" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.386938] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5eb55ab9-4df7-4720-bb2f-bdf0482b2815 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Acquiring lock "489b821e-f7d0-446f-8197-550c808e5a99" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.562950] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0ef78100-3551-4259-860b-40e05765138b tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Acquiring lock "356088a2-b55e-4ff1-9422-a53ab6830fc9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.052590] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 
tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Acquiring lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.052590] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.164038] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.164316] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Cleaning up deleted instances {{(pid=62813) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 829.180367] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] There are 0 instances to clean {{(pid=62813) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 829.183366] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.183366] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Cleaning up deleted instances with incomplete migration {{(pid=62813) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 829.194537] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 830.205066] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 831.163713] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 831.203046] env[62813]: DEBUG oslo_concurrency.lockutils [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Acquiring lock "40655a50-5c68-4141-be93-f7a39aa5a168" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.203046] 
env[62813]: DEBUG oslo_concurrency.lockutils [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Lock "40655a50-5c68-4141-be93-f7a39aa5a168" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.577681] env[62813]: WARNING oslo_vmware.rw_handles [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 831.577681] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 831.577681] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 831.577681] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 831.577681] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 831.577681] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 831.577681] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 831.577681] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 831.577681] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 831.577681] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 831.577681] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 831.577681] env[62813]: ERROR oslo_vmware.rw_handles [ 831.578496] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/ccb431f3-9dec-459c-885c-bb0c3f86dab2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 831.580644] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 831.580975] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Copying Virtual Disk [datastore2] vmware_temp/ccb431f3-9dec-459c-885c-bb0c3f86dab2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/ccb431f3-9dec-459c-885c-bb0c3f86dab2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 831.581384] env[62813]: DEBUG 
oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb0ed7ee-62e3-4fed-9445-779bcfce8692 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.590585] env[62813]: DEBUG oslo_vmware.api [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Waiting for the task: (returnval){ [ 831.590585] env[62813]: value = "task-4267622" [ 831.590585] env[62813]: _type = "Task" [ 831.590585] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.600771] env[62813]: DEBUG oslo_vmware.api [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Task: {'id': task-4267622, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.105233] env[62813]: DEBUG oslo_vmware.exceptions [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 832.105539] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.106148] env[62813]: ERROR nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 832.106148] env[62813]: Faults: ['InvalidArgument'] [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Traceback (most recent call last): [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] yield resources [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] self.driver.spawn(context, instance, image_meta, [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] self._vmops.spawn(context, instance, image_meta, injected_files, [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 
7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] self._fetch_image_if_missing(context, vi) [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] image_cache(vi, tmp_image_ds_loc) [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] vm_util.copy_virtual_disk( [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] session._wait_for_task(vmdk_copy_task) [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] return self.wait_for_task(task_ref) [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] return evt.wait() [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] result = hub.switch() [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] return self.greenlet.switch() [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] self.f(*self.args, **self.kw) [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] raise exceptions.translate_fault(task_info.error) [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Faults: ['InvalidArgument'] [ 
832.106148] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] [ 832.108214] env[62813]: INFO nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Terminating instance [ 832.108550] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.108550] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.109110] env[62813]: DEBUG nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 832.109323] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 832.109541] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a14da6c9-1da4-4cc1-9eda-82c2dc6408e0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.112020] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2559de60-7214-4a80-a39b-cdeaeb9ea4a7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.125550] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 832.130493] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a112c90-a92e-41eb-b435-3f961e2ea2da {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.132458] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.132660] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa 
tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 832.133506] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fa11d7d-1e08-4499-b34d-57d9ca592087 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.141038] env[62813]: DEBUG oslo_vmware.api [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Waiting for the task: (returnval){ [ 832.141038] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]527efb8b-30ec-8749-7241-9f5c50996aa9" [ 832.141038] env[62813]: _type = "Task" [ 832.141038] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.154746] env[62813]: DEBUG oslo_vmware.api [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]527efb8b-30ec-8749-7241-9f5c50996aa9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.163555] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 832.163731] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 832.227025] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 832.227025] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 832.227025] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Deleting the datastore file [datastore2] 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 832.227025] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f59bd981-3326-458c-b75f-0fabece5bc85 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.234686] env[62813]: DEBUG oslo_vmware.api [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Waiting for the task: (returnval){ [ 832.234686] env[62813]: value = "task-4267624" [ 832.234686] env[62813]: _type = "Task" [ 832.234686] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.246537] env[62813]: DEBUG oslo_vmware.api [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Task: {'id': task-4267624, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.651907] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 832.652314] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Creating directory with path [datastore2] vmware_temp/719b8646-64f9-4584-8413-0325dffa093a/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.652453] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-652a1d5c-6906-4734-b3c5-ae7d5f525371 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.668257] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Created directory with path [datastore2] vmware_temp/719b8646-64f9-4584-8413-0325dffa093a/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.668502] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Fetch image to [datastore2] vmware_temp/719b8646-64f9-4584-8413-0325dffa093a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 832.668676] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/719b8646-64f9-4584-8413-0325dffa093a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 832.669582] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b02ad2-f611-468f-8ac3-359fa033de69 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.678953] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90bf858b-e8b6-40e7-99dd-42aaec6000ee {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.689961] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0f9055-7654-45be-a212-2617c6a18b46 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.730296] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8e6517-fbb7-4a3a-84be-d4da39049ba5 {{(pid=62813) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.742320] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c5ade3cc-c87a-4c3f-b4eb-9f666a1026a1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.751171] env[62813]: DEBUG oslo_vmware.api [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Task: {'id': task-4267624, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083753} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.751171] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 832.751171] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 832.751171] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 832.751171] env[62813]: INFO nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Took 0.64 seconds to destroy the instance on the hypervisor. 
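
Note on the task records above: oslo.vmware drives every long-running vSphere call the same way -- the API returns a Task managed-object reference immediately, and VMwareAPISession.wait_for_task() polls it until it reaches a terminal state, which is what produces the repeated "Task: {...} progress is 0%" lines. A minimal sketch of that invoke-then-wait pattern follows; it assumes an already-constructed oslo_vmware.api.VMwareAPISession, and the names dc_ref and ds_path are illustrative placeholders, not values taken from this log.

# Sketch only: the DeleteDatastoreFile_Task / wait_for_task pattern seen above.
# `session` is an oslo_vmware.api.VMwareAPISession; `dc_ref` is a Datacenter
# moref and `ds_path` a string such as "[datastore2] some/file.vmdk".
from oslo_vmware import exceptions as vexc


def delete_datastore_file(session, dc_ref, ds_path):
    file_manager = session.vim.service_content.fileManager
    # The SOAP call returns a Task reference right away; the work is server-side.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    try:
        # Polls the task (the "progress is 0%" DEBUG lines) until it succeeds
        # or raises the translated fault.
        session.wait_for_task(task)
    except vexc.FileNotFoundException:
        # A file that is already gone is normally fine during cleanup.
        pass
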
[ 832.752887] env[62813]: DEBUG nova.compute.claims [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 832.753120] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.753375] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.769592] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 832.857362] env[62813]: DEBUG oslo_vmware.rw_handles [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/719b8646-64f9-4584-8413-0325dffa093a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 832.922099] env[62813]: DEBUG oslo_vmware.rw_handles [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 832.922336] env[62813]: DEBUG oslo_vmware.rw_handles [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/719b8646-64f9-4584-8413-0325dffa093a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 833.166214] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 833.393110] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02921f5-42db-4d9b-b703-6d716cd224eb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.401185] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e634466-3874-4314-bf7c-e0002abde1fe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.431734] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a76162f-0819-4f3a-abb1-e34225f3429a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.438917] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89d3a65-1060-476b-be59-109d48ebfdf6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.452471] env[62813]: DEBUG nova.compute.provider_tree [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.461675] env[62813]: DEBUG nova.scheduler.client.report [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 833.480313] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.727s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.480798] env[62813]: ERROR nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 833.480798] env[62813]: Faults: ['InvalidArgument'] [ 833.480798] 
env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Traceback (most recent call last):
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] self.driver.spawn(context, instance, image_meta,
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] self._fetch_image_if_missing(context, vi)
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] image_cache(vi, tmp_image_ds_loc)
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] vm_util.copy_virtual_disk(
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] session._wait_for_task(vmdk_copy_task)
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] return self.wait_for_task(task_ref)
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] return evt.wait()
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] result = hub.switch()
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] return self.greenlet.switch()
[ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] self.f(*self.args, **self.kw) [ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] raise exceptions.translate_fault(task_info.error) [ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Faults: ['InvalidArgument'] [ 833.480798] env[62813]: ERROR nova.compute.manager [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] [ 833.481594] env[62813]: DEBUG nova.compute.utils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 833.482973] env[62813]: DEBUG nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Build of instance 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77 was re-scheduled: A specified parameter was not correct: fileType [ 833.482973] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 833.483358] env[62813]: DEBUG nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 833.483530] env[62813]: DEBUG nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 833.483684] env[62813]: DEBUG nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 833.483848] env[62813]: DEBUG nova.network.neutron [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 833.899129] env[62813]: DEBUG nova.network.neutron [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.911342] env[62813]: INFO nova.compute.manager [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Took 0.43 seconds to deallocate network for instance. [ 834.045069] env[62813]: INFO nova.scheduler.client.report [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Deleted allocations for instance 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77 [ 834.078310] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62b7ffde-2383-46ab-b980-9d92a77d9cc0 tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Lock "7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 244.020s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.080822] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ca9f9e2a-1f76-4b9d-909b-6952261066ca tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Lock "7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 41.403s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.080944] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ca9f9e2a-1f76-4b9d-909b-6952261066ca tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Acquiring lock "7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.081165] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ca9f9e2a-1f76-4b9d-909b-6952261066ca tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Lock 
"7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.081330] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ca9f9e2a-1f76-4b9d-909b-6952261066ca tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Lock "7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.085177] env[62813]: INFO nova.compute.manager [None req-ca9f9e2a-1f76-4b9d-909b-6952261066ca tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Terminating instance [ 834.087352] env[62813]: DEBUG nova.compute.manager [None req-ca9f9e2a-1f76-4b9d-909b-6952261066ca tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 834.087508] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9f9e2a-1f76-4b9d-909b-6952261066ca tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 834.087827] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-933da5b7-e33c-4605-9c8a-98f5f432b72b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.097606] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28c876c-227d-4d68-b3ec-77729421c921 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.118480] env[62813]: DEBUG nova.compute.manager [None req-b71de503-8c74-4fab-8038-bdcb0be7868c tempest-ImagesNegativeTestJSON-1418842950 tempest-ImagesNegativeTestJSON-1418842950-project-member] [instance: 51817412-7e0a-48fe-8f8e-766eea45e60c] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 834.131631] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-ca9f9e2a-1f76-4b9d-909b-6952261066ca tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77 could not be found. 
[ 834.132486] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9f9e2a-1f76-4b9d-909b-6952261066ca tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 834.132486] env[62813]: INFO nova.compute.manager [None req-ca9f9e2a-1f76-4b9d-909b-6952261066ca tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Took 0.04 seconds to destroy the instance on the hypervisor. [ 834.132697] env[62813]: DEBUG oslo.service.loopingcall [None req-ca9f9e2a-1f76-4b9d-909b-6952261066ca tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.133306] env[62813]: DEBUG nova.compute.manager [-] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 834.133468] env[62813]: DEBUG nova.network.neutron [-] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 834.163242] env[62813]: DEBUG nova.network.neutron [-] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.164810] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 834.165008] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 834.165204] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 834.167643] env[62813]: DEBUG nova.compute.manager [None req-b71de503-8c74-4fab-8038-bdcb0be7868c tempest-ImagesNegativeTestJSON-1418842950 tempest-ImagesNegativeTestJSON-1418842950-project-member] [instance: 51817412-7e0a-48fe-8f8e-766eea45e60c] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 834.185663] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 834.185836] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 834.185971] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 834.187141] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 834.187386] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 834.187580] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 834.187735] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 834.188060] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 834.188243] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 834.188404] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 834.190750] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 834.194437] env[62813]: INFO nova.compute.manager [-] [instance: 7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77] Took 0.06 seconds to deallocate network for instance. 
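
Note on the periodic task records: the recurring "Running periodic task ComputeManager._*" lines are emitted by oslo.service's periodic-task runner, which logs each registered task before invoking it on its configured interval. A minimal, self-contained sketch of that mechanism follows; the manager class, task name and spacing are hypothetical and only mirror the pattern, not Nova's actual ComputeManager configuration.

# Sketch only: how oslo.service periodic tasks are declared and driven.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class DemoManager(periodic_task.PeriodicTasks):
    """Hypothetical manager mirroring the ComputeManager pattern in the log."""

    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _heal_caches(self, context):
        # run_periodic_tasks() logs a "Running periodic task ..." line before
        # calling this body -- the same shape as the DEBUG lines above.
        pass


if __name__ == '__main__':
    manager = DemoManager()
    # A real service calls this from a looping timer, passing a request context.
    manager.run_periodic_tasks(context=None)
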
[ 834.195672] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 834.195672] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6425a6a1-1253-4318-adce-52a7ba85b69a tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "7f7b3544-6e9f-493a-b190-537d6c3b7979" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.195785] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6425a6a1-1253-4318-adce-52a7ba85b69a tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "7f7b3544-6e9f-493a-b190-537d6c3b7979" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.214373] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b71de503-8c74-4fab-8038-bdcb0be7868c tempest-ImagesNegativeTestJSON-1418842950 tempest-ImagesNegativeTestJSON-1418842950-project-member] Lock "51817412-7e0a-48fe-8f8e-766eea45e60c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.246s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.235135] env[62813]: DEBUG nova.compute.manager [None req-1f435e39-67ab-414c-8190-340cf106387e tempest-ServersWithSpecificFlavorTestJSON-1247154468 tempest-ServersWithSpecificFlavorTestJSON-1247154468-project-member] [instance: 52329a92-b534-4811-b117-2041b125f4c7] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 834.308956] env[62813]: DEBUG nova.compute.manager [None req-1f435e39-67ab-414c-8190-340cf106387e tempest-ServersWithSpecificFlavorTestJSON-1247154468 tempest-ServersWithSpecificFlavorTestJSON-1247154468-project-member] [instance: 52329a92-b534-4811-b117-2041b125f4c7] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 834.334871] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1f435e39-67ab-414c-8190-340cf106387e tempest-ServersWithSpecificFlavorTestJSON-1247154468 tempest-ServersWithSpecificFlavorTestJSON-1247154468-project-member] Lock "52329a92-b534-4811-b117-2041b125f4c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.634s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.350119] env[62813]: DEBUG nova.compute.manager [None req-a7c08035-f827-4cc4-8e06-3d7920649966 tempest-InstanceActionsTestJSON-1307636854 tempest-InstanceActionsTestJSON-1307636854-project-member] [instance: b9e1cb26-df61-44aa-952f-e50e24766a6f] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 834.388719] env[62813]: DEBUG nova.compute.manager [None req-a7c08035-f827-4cc4-8e06-3d7920649966 tempest-InstanceActionsTestJSON-1307636854 tempest-InstanceActionsTestJSON-1307636854-project-member] [instance: b9e1cb26-df61-44aa-952f-e50e24766a6f] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 834.395708] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ca9f9e2a-1f76-4b9d-909b-6952261066ca tempest-ServerDiagnosticsNegativeTest-213310705 tempest-ServerDiagnosticsNegativeTest-213310705-project-member] Lock "7ab3a69a-9893-4ed2-b3e7-13ed81ee7c77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.315s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.415293] env[62813]: DEBUG oslo_concurrency.lockutils [None req-a7c08035-f827-4cc4-8e06-3d7920649966 tempest-InstanceActionsTestJSON-1307636854 tempest-InstanceActionsTestJSON-1307636854-project-member] Lock "b9e1cb26-df61-44aa-952f-e50e24766a6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.898s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.426764] env[62813]: DEBUG nova.compute.manager [None req-8fcb9a0e-ecaa-4177-a851-6cb6238a3e62 tempest-InstanceActionsNegativeTestJSON-1061991352 tempest-InstanceActionsNegativeTestJSON-1061991352-project-member] [instance: 338a113c-1d04-4243-8500-fcc6d458d3ed] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 834.451694] env[62813]: DEBUG nova.compute.manager [None req-8fcb9a0e-ecaa-4177-a851-6cb6238a3e62 tempest-InstanceActionsNegativeTestJSON-1061991352 tempest-InstanceActionsNegativeTestJSON-1061991352-project-member] [instance: 338a113c-1d04-4243-8500-fcc6d458d3ed] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 834.474849] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8fcb9a0e-ecaa-4177-a851-6cb6238a3e62 tempest-InstanceActionsNegativeTestJSON-1061991352 tempest-InstanceActionsNegativeTestJSON-1061991352-project-member] Lock "338a113c-1d04-4243-8500-fcc6d458d3ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.738s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.499058] env[62813]: DEBUG nova.compute.manager [None req-4495db5c-fe45-4331-b47d-784a83346216 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: 70a8650c-112c-44c2-b3cb-1b8cab557cac] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 834.528758] env[62813]: DEBUG nova.compute.manager [None req-4495db5c-fe45-4331-b47d-784a83346216 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: 70a8650c-112c-44c2-b3cb-1b8cab557cac] Instance disappeared before build. 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 834.552474] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4495db5c-fe45-4331-b47d-784a83346216 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "70a8650c-112c-44c2-b3cb-1b8cab557cac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.380s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.565914] env[62813]: DEBUG nova.compute.manager [None req-15d89fd7-63ab-497d-98ba-10eff0a7ed77 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 3684a5c7-4998-4d5b-8cb6-7df3a9bf9270] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 834.600957] env[62813]: DEBUG nova.compute.manager [None req-15d89fd7-63ab-497d-98ba-10eff0a7ed77 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 3684a5c7-4998-4d5b-8cb6-7df3a9bf9270] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 834.629048] env[62813]: DEBUG oslo_concurrency.lockutils [None req-15d89fd7-63ab-497d-98ba-10eff0a7ed77 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "3684a5c7-4998-4d5b-8cb6-7df3a9bf9270" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.747s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.641054] env[62813]: DEBUG nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 834.713084] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.713776] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.715414] env[62813]: INFO nova.compute.claims [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 834.802776] env[62813]: DEBUG nova.scheduler.client.report [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Refreshing inventories for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 834.821111] env[62813]: DEBUG nova.scheduler.client.report [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Updating ProviderTree inventory for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 834.821354] env[62813]: DEBUG nova.compute.provider_tree [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Updating inventory in ProviderTree for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 834.824187] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquiring lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.824426] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.836902] env[62813]: DEBUG nova.scheduler.client.report [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Refreshing aggregate associations for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2, aggregates: None {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 834.862130] env[62813]: DEBUG nova.scheduler.client.report [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Refreshing trait associations for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 835.197259] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 835.318770] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320b8f64-4b39-44c8-a1dd-dc2292d2134c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.330435] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c17f2e-2064-4044-8778-094245637bee {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.370573] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33aed2d9-fb9a-4109-bdd8-b75e31a1f19c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.380242] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e7b62f-3a2c-4cc6-80c8-764c6b95e3c9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.397015] env[62813]: DEBUG nova.compute.provider_tree [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.411507] env[62813]: DEBUG nova.scheduler.client.report [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 835.429160] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.716s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.429588] env[62813]: DEBUG nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 835.485450] env[62813]: DEBUG nova.compute.utils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 835.490022] env[62813]: DEBUG nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 835.490022] env[62813]: DEBUG nova.network.neutron [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 835.499321] env[62813]: DEBUG nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 835.581985] env[62813]: DEBUG nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 835.616816] env[62813]: DEBUG nova.virt.hardware [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 835.617266] env[62813]: DEBUG nova.virt.hardware [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 835.617561] env[62813]: DEBUG nova.virt.hardware [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 835.617899] env[62813]: DEBUG nova.virt.hardware [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 835.618196] env[62813]: DEBUG nova.virt.hardware [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 835.618497] env[62813]: DEBUG nova.virt.hardware [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 835.618962] env[62813]: DEBUG nova.virt.hardware [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 835.619296] env[62813]: DEBUG nova.virt.hardware [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 835.619609] env[62813]: DEBUG nova.virt.hardware [None 
req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 835.619952] env[62813]: DEBUG nova.virt.hardware [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 835.620305] env[62813]: DEBUG nova.virt.hardware [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 835.622044] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee69eed-a59d-4180-8f9a-5e64918825b0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.631326] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0eeade-c6fa-45eb-82b9-907cf1ee8155 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.637469] env[62813]: DEBUG nova.policy [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11fff818cbe24d01b6233bb09f336531', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6cf8ad1c15764dc8a4a6ca3e0a2dd599', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 836.163933] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.178692] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.179435] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.179652] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.179857] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 836.181024] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a5c807e-44e2-4676-8b48-7d4d9df233f4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.192022] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b21e66-e3e8-49ef-8d0f-7c900d38d4e7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.208965] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257038d2-74d7-4b20-8497-e49a28a16c70 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.216729] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a327d55-e7cc-4c08-acd2-b42715bf9538 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.256263] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180754MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 836.256435] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.256660] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.369597] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 65d620ad-0863-4947-945e-0e4b3c01d3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 836.369772] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 012bbc43-f61f-4aef-bd66-32fbe66f8374 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 836.369903] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 09aa702f-a28c-429b-83d9-378be8606a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 836.370041] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 9a448d2b-0dee-4a90-b131-e6ada542f342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 836.370958] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance aa76585b-55a8-437c-8dea-7731d85a3b82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 836.371144] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 836.371284] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 76b0e03d-9636-4328-bfd5-17c434cfae72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 836.371409] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 356088a2-b55e-4ff1-9422-a53ab6830fc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 836.371532] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 489b821e-f7d0-446f-8197-550c808e5a99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 836.371652] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b946bdda-a8a4-4a82-b2f7-99637fcae21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 836.388040] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b845396e-1641-4668-b687-348f1ee8b6f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.405185] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4e783b92-0668-4e70-9848-4b4320318603 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.416924] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a7f9eb88-e447-4f59-814e-6ebbb6a2ecb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.429189] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 51a8bec7-c8fd-4efb-ad91-eb34bb40a8fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.448412] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 097bae8e-614d-4322-b767-d56e0dc1b658 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.461965] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 22549912-2253-42bb-b2d7-8d0512c2a9d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.474919] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7cdd84be-1e0a-4e4a-9e40-b4d589f08914 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.486939] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f863dfb3-98d5-473b-9e41-85984e350070 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.499302] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 952194e3-6318-4ecc-8d48-bda3811c4d49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.510357] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5b935b76-027b-4b4a-a61b-3e4cc2f36c08 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.521991] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 8852d058-a494-47e4-977d-289b5126f7ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.532837] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.544552] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9b7bace-d76a-4dd8-8283-b56fd86a77a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.557840] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.568968] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 40655a50-5c68-4141-be93-f7a39aa5a168 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.580583] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 836.580871] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 836.581039] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 836.712602] env[62813]: DEBUG nova.network.neutron [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Successfully created port: c718ea23-f1d0-462e-a681-ff2b44ace946 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.011769] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d123555-7b20-40f5-8382-3ddc88ff4b94 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "6ed29c0d-710c-4f2f-b321-bbd8d253f918" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.012023] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d123555-7b20-40f5-8382-3ddc88ff4b94 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "6ed29c0d-710c-4f2f-b321-bbd8d253f918" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.035993] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7001be-a3a2-4556-880d-23a6100bd9ea {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.044474] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878b7344-451a-4b22-9556-73f20a6d9c3f {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.075122] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78aee1fe-8b24-4816-b695-216b4043f0a8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.083933] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa818a88-4147-457f-ac20-f24470c3211d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.105738] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.115412] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 837.141880] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 837.142150] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.885s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.689179] env[62813]: DEBUG nova.network.neutron [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Successfully updated port: c718ea23-f1d0-462e-a681-ff2b44ace946 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.701706] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Acquiring lock "refresh_cache-b946bdda-a8a4-4a82-b2f7-99637fcae21c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.701862] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Acquired lock "refresh_cache-b946bdda-a8a4-4a82-b2f7-99637fcae21c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.702025] env[62813]: DEBUG nova.network.neutron [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 
tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 837.769979] env[62813]: DEBUG nova.network.neutron [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.058240] env[62813]: DEBUG nova.network.neutron [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Updating instance_info_cache with network_info: [{"id": "c718ea23-f1d0-462e-a681-ff2b44ace946", "address": "fa:16:3e:7b:f5:1b", "network": {"id": "64ae8cfe-bdf1-40c7-a5b5-375d2e4de176", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2127775475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6cf8ad1c15764dc8a4a6ca3e0a2dd599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc718ea23-f1", "ovs_interfaceid": "c718ea23-f1d0-462e-a681-ff2b44ace946", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.077177] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Releasing lock "refresh_cache-b946bdda-a8a4-4a82-b2f7-99637fcae21c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.077506] env[62813]: DEBUG nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Instance network_info: |[{"id": "c718ea23-f1d0-462e-a681-ff2b44ace946", "address": "fa:16:3e:7b:f5:1b", "network": {"id": "64ae8cfe-bdf1-40c7-a5b5-375d2e4de176", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2127775475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6cf8ad1c15764dc8a4a6ca3e0a2dd599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc718ea23-f1", "ovs_interfaceid": "c718ea23-f1d0-462e-a681-ff2b44ace946", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 838.077932] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:f5:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c718ea23-f1d0-462e-a681-ff2b44ace946', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.089562] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Creating folder: Project (6cf8ad1c15764dc8a4a6ca3e0a2dd599). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 838.090253] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b89b94d-cf26-4efa-afdd-663998b1edca {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.102757] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Created folder: Project (6cf8ad1c15764dc8a4a6ca3e0a2dd599) in parent group-v840812. [ 838.102971] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Creating folder: Instances. Parent ref: group-v840857. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 838.105289] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10a375e8-3c93-4526-abed-acf39c2b2a71 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.114282] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Created folder: Instances in parent group-v840857. [ 838.115058] env[62813]: DEBUG oslo.service.loopingcall [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.115058] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 838.115058] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e1a857a-c4ba-457e-87c5-cb5228c6f46c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.139737] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.139737] env[62813]: value = "task-4267627" [ 838.139737] env[62813]: _type = "Task" [ 838.139737] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.151542] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267627, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.658386] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267627, 'name': CreateVM_Task, 'duration_secs': 0.35272} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.658562] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 838.659354] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.659547] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.659910] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 838.660340] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f00a6f75-5b67-4347-93a1-70adb6e1cf43 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.666111] env[62813]: DEBUG oslo_vmware.api [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Waiting for the task: (returnval){ [ 838.666111] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5277b1d6-7c3a-c805-a1b3-231113cb4262" [ 838.666111] env[62813]: _type = "Task" [ 838.666111] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.680357] env[62813]: DEBUG oslo_vmware.api [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5277b1d6-7c3a-c805-a1b3-231113cb4262, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.760399] env[62813]: DEBUG nova.compute.manager [req-06f8c9ce-e166-46e2-8cfa-dab17c97213f req-f1453066-203b-44d7-a1f5-13eb0be3a460 service nova] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Received event network-vif-plugged-c718ea23-f1d0-462e-a681-ff2b44ace946 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 838.762086] env[62813]: DEBUG oslo_concurrency.lockutils [req-06f8c9ce-e166-46e2-8cfa-dab17c97213f req-f1453066-203b-44d7-a1f5-13eb0be3a460 service nova] Acquiring lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.762086] env[62813]: DEBUG oslo_concurrency.lockutils [req-06f8c9ce-e166-46e2-8cfa-dab17c97213f req-f1453066-203b-44d7-a1f5-13eb0be3a460 service nova] Lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.762086] env[62813]: DEBUG oslo_concurrency.lockutils [req-06f8c9ce-e166-46e2-8cfa-dab17c97213f req-f1453066-203b-44d7-a1f5-13eb0be3a460 service nova] Lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.763452] env[62813]: DEBUG nova.compute.manager [req-06f8c9ce-e166-46e2-8cfa-dab17c97213f req-f1453066-203b-44d7-a1f5-13eb0be3a460 service nova] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] No waiting events found dispatching network-vif-plugged-c718ea23-f1d0-462e-a681-ff2b44ace946 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 838.763566] env[62813]: WARNING nova.compute.manager [req-06f8c9ce-e166-46e2-8cfa-dab17c97213f req-f1453066-203b-44d7-a1f5-13eb0be3a460 service nova] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Received unexpected event network-vif-plugged-c718ea23-f1d0-462e-a681-ff2b44ace946 for instance with vm_state building and task_state spawning. 
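The repeated "Acquiring lock ... by ..." / "Lock ... acquired ... :: waited" / "Lock ... released ... :: held" DEBUG lines throughout this trace are emitted from inside oslo.concurrency's lockutils wrapper whenever Nova enters a decorated critical section (the "compute_resources" lock, the per-instance build lock, the instance-events lock, and so on). Below is a minimal, self-contained sketch of that mechanism, assuming only that oslo.concurrency is installed; the standalone script and the stub function name update_available_resource are illustrative assumptions rather than Nova source, but lockutils.synchronized and its DEBUG logging are real oslo.concurrency behaviour.

import logging

from oslo_concurrency import lockutils

# lockutils logs lock acquisition and release at DEBUG level through the
# standard logging module; enabling DEBUG here is what surfaces lines of
# the same shape as those in the trace above.
logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized("compute_resources")
def update_available_resource():
    # Critical section: only one thread/greenthread in this process may
    # hold the "compute_resources" semaphore at a time, mirroring how the
    # resource tracker serializes its audit and update phases.
    pass


if __name__ == "__main__":
    update_available_resource()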
[ 839.057544] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5ef90781-9b78-4ea0-85ae-a263fa0f7e96 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "90f047bb-b344-445b-906e-ca8efedf6f60" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.057761] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5ef90781-9b78-4ea0-85ae-a263fa0f7e96 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "90f047bb-b344-445b-906e-ca8efedf6f60" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.177964] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.178288] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.178556] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.142823] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33e5c19a-70b2-46f3-b709-28a7ca4079ee tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Acquiring lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.975032] env[62813]: DEBUG nova.compute.manager [req-158bbd15-4778-4e28-b9f0-2d0545c82ed1 req-a89b5b8e-63d9-4ce7-bdb1-3dd6a3ed6973 service nova] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Received event network-changed-c718ea23-f1d0-462e-a681-ff2b44ace946 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 840.975330] env[62813]: DEBUG nova.compute.manager [req-158bbd15-4778-4e28-b9f0-2d0545c82ed1 req-a89b5b8e-63d9-4ce7-bdb1-3dd6a3ed6973 service nova] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Refreshing instance network info cache due to event network-changed-c718ea23-f1d0-462e-a681-ff2b44ace946. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 840.975444] env[62813]: DEBUG oslo_concurrency.lockutils [req-158bbd15-4778-4e28-b9f0-2d0545c82ed1 req-a89b5b8e-63d9-4ce7-bdb1-3dd6a3ed6973 service nova] Acquiring lock "refresh_cache-b946bdda-a8a4-4a82-b2f7-99637fcae21c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.975653] env[62813]: DEBUG oslo_concurrency.lockutils [req-158bbd15-4778-4e28-b9f0-2d0545c82ed1 req-a89b5b8e-63d9-4ce7-bdb1-3dd6a3ed6973 service nova] Acquired lock "refresh_cache-b946bdda-a8a4-4a82-b2f7-99637fcae21c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.975746] env[62813]: DEBUG nova.network.neutron [req-158bbd15-4778-4e28-b9f0-2d0545c82ed1 req-a89b5b8e-63d9-4ce7-bdb1-3dd6a3ed6973 service nova] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Refreshing network info cache for port c718ea23-f1d0-462e-a681-ff2b44ace946 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 841.600182] env[62813]: DEBUG nova.network.neutron [req-158bbd15-4778-4e28-b9f0-2d0545c82ed1 req-a89b5b8e-63d9-4ce7-bdb1-3dd6a3ed6973 service nova] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Updated VIF entry in instance network info cache for port c718ea23-f1d0-462e-a681-ff2b44ace946. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 841.600182] env[62813]: DEBUG nova.network.neutron [req-158bbd15-4778-4e28-b9f0-2d0545c82ed1 req-a89b5b8e-63d9-4ce7-bdb1-3dd6a3ed6973 service nova] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Updating instance_info_cache with network_info: [{"id": "c718ea23-f1d0-462e-a681-ff2b44ace946", "address": "fa:16:3e:7b:f5:1b", "network": {"id": "64ae8cfe-bdf1-40c7-a5b5-375d2e4de176", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2127775475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6cf8ad1c15764dc8a4a6ca3e0a2dd599", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc718ea23-f1", "ovs_interfaceid": "c718ea23-f1d0-462e-a681-ff2b44ace946", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.608576] env[62813]: DEBUG oslo_concurrency.lockutils [req-158bbd15-4778-4e28-b9f0-2d0545c82ed1 req-a89b5b8e-63d9-4ce7-bdb1-3dd6a3ed6973 service nova] Releasing lock "refresh_cache-b946bdda-a8a4-4a82-b2f7-99637fcae21c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.028627] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6499534e-1e11-4dd1-943d-66d3082a4b33 tempest-ServersNegativeTestJSON-1827764029 tempest-ServersNegativeTestJSON-1827764029-project-member] Acquiring 
lock "419ed7f3-e302-4b62-965c-d12dc88ff2c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.031957] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6499534e-1e11-4dd1-943d-66d3082a4b33 tempest-ServersNegativeTestJSON-1827764029 tempest-ServersNegativeTestJSON-1827764029-project-member] Lock "419ed7f3-e302-4b62-965c-d12dc88ff2c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.818021] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e5ef1f17-54ae-45f9-acfe-e9672aac21ca tempest-TenantUsagesTestJSON-835618339 tempest-TenantUsagesTestJSON-835618339-project-member] Acquiring lock "69c52191-e779-4bb4-b3aa-f39c8a70450b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.819046] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e5ef1f17-54ae-45f9-acfe-e9672aac21ca tempest-TenantUsagesTestJSON-835618339 tempest-TenantUsagesTestJSON-835618339-project-member] Lock "69c52191-e779-4bb4-b3aa-f39c8a70450b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.080508] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2f4db042-0ec4-4d47-a6e8-2e40d8e89b23 tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] Acquiring lock "f424eed0-7af3-45e7-b451-ddd6c23871f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.080508] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2f4db042-0ec4-4d47-a6e8-2e40d8e89b23 tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] Lock "f424eed0-7af3-45e7-b451-ddd6c23871f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.182633] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3434c3de-e3c4-4064-95ae-05e434e4b08d tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Acquiring lock "989c0e58-b997-44c5-bc4f-759a30fbbfe3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.182973] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3434c3de-e3c4-4064-95ae-05e434e4b08d tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Lock "989c0e58-b997-44c5-bc4f-759a30fbbfe3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.384723] 
env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae934f32-2f05-40b0-9fc1-9945b203f114 tempest-ServerRescueTestJSONUnderV235-1256362219 tempest-ServerRescueTestJSONUnderV235-1256362219-project-member] Acquiring lock "ea2f91dc-1762-420a-90dc-c2e32811d911" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.385091] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae934f32-2f05-40b0-9fc1-9945b203f114 tempest-ServerRescueTestJSONUnderV235-1256362219 tempest-ServerRescueTestJSONUnderV235-1256362219-project-member] Lock "ea2f91dc-1762-420a-90dc-c2e32811d911" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.554118] env[62813]: DEBUG oslo_concurrency.lockutils [None req-eab368e8-3cde-4eaf-807e-231deb952ef9 tempest-ServerAddressesNegativeTestJSON-1449664058 tempest-ServerAddressesNegativeTestJSON-1449664058-project-member] Acquiring lock "a564338e-81c2-4b17-a507-081f6a57e190" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.554426] env[62813]: DEBUG oslo_concurrency.lockutils [None req-eab368e8-3cde-4eaf-807e-231deb952ef9 tempest-ServerAddressesNegativeTestJSON-1449664058 tempest-ServerAddressesNegativeTestJSON-1449664058-project-member] Lock "a564338e-81c2-4b17-a507-081f6a57e190" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.489406] env[62813]: WARNING oslo_vmware.rw_handles [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 882.489406] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 882.489406] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 882.489406] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 882.489406] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 882.489406] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 882.489406] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 882.489406] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 882.489406] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 882.489406] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 882.489406] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 882.489406] env[62813]: ERROR oslo_vmware.rw_handles [ 882.489406] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa 
tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/719b8646-64f9-4584-8413-0325dffa093a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 882.490860] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 882.491180] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Copying Virtual Disk [datastore2] vmware_temp/719b8646-64f9-4584-8413-0325dffa093a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/719b8646-64f9-4584-8413-0325dffa093a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 882.491470] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98d15611-c05f-436b-b5af-ed7e9df099be {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.499641] env[62813]: DEBUG oslo_vmware.api [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Waiting for the task: (returnval){ [ 882.499641] env[62813]: value = "task-4267628" [ 882.499641] env[62813]: _type = "Task" [ 882.499641] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.509845] env[62813]: DEBUG oslo_vmware.api [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Task: {'id': task-4267628, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.010480] env[62813]: DEBUG oslo_vmware.exceptions [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 883.010480] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.011016] env[62813]: ERROR nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 883.011016] env[62813]: Faults: ['InvalidArgument'] [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Traceback (most recent call last): [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] yield resources [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] self.driver.spawn(context, instance, image_meta, [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] self._vmops.spawn(context, instance, image_meta, injected_files, [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] self._fetch_image_if_missing(context, vi) [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] image_cache(vi, tmp_image_ds_loc) [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] vm_util.copy_virtual_disk( [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] session._wait_for_task(vmdk_copy_task) [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] return self.wait_for_task(task_ref) [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] return evt.wait() [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] result = hub.switch() [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] return self.greenlet.switch() [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] self.f(*self.args, **self.kw) [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] raise exceptions.translate_fault(task_info.error) [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Faults: ['InvalidArgument'] [ 883.011016] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] [ 883.012118] env[62813]: INFO nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Terminating instance [ 883.012890] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.013117] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 883.013353] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-220485e4-0189-4cf6-9992-136aa53877e2 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.015829] env[62813]: DEBUG nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 883.016115] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 883.016772] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82819256-53cc-4254-84b2-e039a9fbb894 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.024109] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 883.024656] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-630bab93-24c0-460a-a7b7-d21ffecff94f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.027193] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 883.027370] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 883.028449] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-344bbd1d-88b2-4e3b-bd0d-d6ab86e4cfc3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.034058] env[62813]: DEBUG oslo_vmware.api [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Waiting for the task: (returnval){ [ 883.034058] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52945a76-4f24-c656-1692-1412edbd2979" [ 883.034058] env[62813]: _type = "Task" [ 883.034058] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.047390] env[62813]: DEBUG oslo_vmware.api [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52945a76-4f24-c656-1692-1412edbd2979, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.101789] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 883.102034] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 883.102228] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Deleting the datastore file [datastore2] 012bbc43-f61f-4aef-bd66-32fbe66f8374 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 883.102506] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05230527-5610-4853-b9a2-d758347d2552 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.111898] env[62813]: DEBUG oslo_vmware.api [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Waiting for the task: (returnval){ [ 883.111898] env[62813]: value = "task-4267630" [ 883.111898] env[62813]: _type = "Task" [ 883.111898] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.121782] env[62813]: DEBUG oslo_vmware.api [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Task: {'id': task-4267630, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.549104] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 883.549104] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Creating directory with path [datastore2] vmware_temp/32ed2d68-0f8d-440c-9c03-2391e603f0e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 883.549104] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-296023ea-d27e-4053-8eb7-d36fa67bc8c0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.562026] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Created directory with path [datastore2] vmware_temp/32ed2d68-0f8d-440c-9c03-2391e603f0e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 883.562026] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Fetch image to [datastore2] vmware_temp/32ed2d68-0f8d-440c-9c03-2391e603f0e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 883.562026] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/32ed2d68-0f8d-440c-9c03-2391e603f0e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 883.562026] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aaca423-b707-4f99-b7cf-2fef84076c1e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.570090] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de92009d-5023-4bf0-be41-c4df4b102917 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.580683] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5fe5ecf-4ab7-4161-88d1-b52bccd8a5a2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.619023] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6652cc7a-d168-4748-9f91-400934634eab {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.626773] env[62813]: DEBUG oslo_vmware.api [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Task: {'id': task-4267630, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076392} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.628262] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 883.628447] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 883.628614] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 883.628810] env[62813]: INFO nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Took 0.61 seconds to destroy the instance on the hypervisor. 
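
The entries above show the same submit/poll pattern twice: DeleteDatastoreFile_Task is polled until it reports success (duration_secs 0.076392), while the earlier CopyVirtualDisk_Task poll ended by raising the translated InvalidArgument fault seen in the traceback. The following is a minimal, stdlib-only sketch of that pattern under stated assumptions; it is not the oslo_vmware implementation, and the names (wait_for_task, VimFaultError, the task-info dict shape) are illustrative only.

    # Hypothetical sketch of the submit/poll/translate-fault pattern the log shows.
    # Not oslo_vmware's real API; names and the task-info shape are assumptions.
    import time

    class VimFaultError(Exception):
        """Stand-in for a translated VIM fault such as InvalidArgument."""
        def __init__(self, message, faults):
            super().__init__(message)
            self.faults = faults

    def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=300.0):
        """Poll a task until it succeeds; raise a translated fault on error.

        get_task_info(task_ref) is assumed to return a dict like
        {'state': 'running'|'success'|'error', 'progress': int,
         'error': {'message': str, 'faults': [str, ...]}}.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                err = info['error']
                # Analogous to "raise exceptions.translate_fault(task_info.error)"
                raise VimFaultError(err['message'], err['faults'])
            time.sleep(interval)  # each pass corresponds to a "progress is N%" line
        raise TimeoutError('task %s did not finish within %ss' % (task_ref, timeout))

With the fault recorded above, such a loop would raise VimFaultError("A specified parameter was not correct: fileType", ['InvalidArgument']), which is what the spawn path surfaces in the traceback.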
[ 883.630664] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-30f88175-ffce-4e1c-b243-181c7045f18d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.632691] env[62813]: DEBUG nova.compute.claims [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 883.632870] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.633093] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.733065] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 883.927223] env[62813]: DEBUG oslo_vmware.rw_handles [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/32ed2d68-0f8d-440c-9c03-2391e603f0e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 883.989009] env[62813]: DEBUG oslo_vmware.rw_handles [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 883.989561] env[62813]: DEBUG oslo_vmware.rw_handles [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/32ed2d68-0f8d-440c-9c03-2391e603f0e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 884.119282] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3768031a-930e-4ba7-8617-0ecb4d7189c2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.127527] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630bd793-442d-468d-839f-7820342d7eaf {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.161228] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2d00b4-b35b-447d-88c8-226b787b7827 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.170364] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944f8bd5-188c-477b-83ea-c2da5094de4c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.186859] env[62813]: DEBUG nova.compute.provider_tree [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.195767] env[62813]: DEBUG nova.scheduler.client.report [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 884.214406] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.581s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.214970] env[62813]: ERROR nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 884.214970] env[62813]: Faults: ['InvalidArgument'] [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Traceback (most recent call last): [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 884.214970] env[62813]: ERROR nova.compute.manager 
[instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] self.driver.spawn(context, instance, image_meta, [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] self._vmops.spawn(context, instance, image_meta, injected_files, [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] self._fetch_image_if_missing(context, vi) [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] image_cache(vi, tmp_image_ds_loc) [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] vm_util.copy_virtual_disk( [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] session._wait_for_task(vmdk_copy_task) [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] return self.wait_for_task(task_ref) [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] return evt.wait() [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] result = hub.switch() [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] return self.greenlet.switch() [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] self.f(*self.args, **self.kw) [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] raise exceptions.translate_fault(task_info.error) [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Faults: ['InvalidArgument'] [ 884.214970] env[62813]: ERROR nova.compute.manager [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] [ 884.216461] env[62813]: DEBUG nova.compute.utils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 884.217308] env[62813]: DEBUG nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Build of instance 012bbc43-f61f-4aef-bd66-32fbe66f8374 was re-scheduled: A specified parameter was not correct: fileType [ 884.217308] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 884.217722] env[62813]: DEBUG nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 884.217898] env[62813]: DEBUG nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 884.218074] env[62813]: DEBUG nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 884.218253] env[62813]: DEBUG nova.network.neutron [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 884.941092] env[62813]: DEBUG nova.network.neutron [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.958092] env[62813]: INFO nova.compute.manager [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Took 0.74 seconds to deallocate network for instance. [ 885.090674] env[62813]: INFO nova.scheduler.client.report [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Deleted allocations for instance 012bbc43-f61f-4aef-bd66-32fbe66f8374 [ 885.117462] env[62813]: DEBUG oslo_concurrency.lockutils [None req-290b8bbf-4ead-4a72-abb3-261d8a6752fa tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Lock "012bbc43-f61f-4aef-bd66-32fbe66f8374" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 294.091s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.119545] env[62813]: DEBUG oslo_concurrency.lockutils [None req-cf01784f-8e46-4034-937d-91eac91fde33 tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Lock "012bbc43-f61f-4aef-bd66-32fbe66f8374" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 91.121s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.119545] env[62813]: DEBUG oslo_concurrency.lockutils [None req-cf01784f-8e46-4034-937d-91eac91fde33 tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Acquiring lock "012bbc43-f61f-4aef-bd66-32fbe66f8374-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.119545] env[62813]: DEBUG oslo_concurrency.lockutils [None req-cf01784f-8e46-4034-937d-91eac91fde33 tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Lock "012bbc43-f61f-4aef-bd66-32fbe66f8374-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.119545] env[62813]: DEBUG oslo_concurrency.lockutils [None req-cf01784f-8e46-4034-937d-91eac91fde33 tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Lock "012bbc43-f61f-4aef-bd66-32fbe66f8374-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.121395] env[62813]: INFO nova.compute.manager [None req-cf01784f-8e46-4034-937d-91eac91fde33 tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Terminating instance [ 885.124941] env[62813]: DEBUG nova.compute.manager [None req-cf01784f-8e46-4034-937d-91eac91fde33 tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 885.124941] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-cf01784f-8e46-4034-937d-91eac91fde33 tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 885.124941] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d26fc18-336c-40df-a2a0-3acf42719111 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.133810] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4042939a-2c59-44d8-906f-f147b7ab8b5a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.155555] env[62813]: DEBUG nova.compute.manager [None req-0cae137f-23f0-434d-84f4-4382b69a7b06 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 704614e0-aaa3-48b6-8208-47af7ca0f367] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.168124] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-cf01784f-8e46-4034-937d-91eac91fde33 tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 012bbc43-f61f-4aef-bd66-32fbe66f8374 could not be found. [ 885.168423] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-cf01784f-8e46-4034-937d-91eac91fde33 tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 885.168650] env[62813]: INFO nova.compute.manager [None req-cf01784f-8e46-4034-937d-91eac91fde33 tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Took 0.05 seconds to destroy the instance on the hypervisor. 
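
The lockutils lines above account for both how long a caller waited for a lock and how long it held it (for example "waited 91.121s" before terminate and "held 294.091s" for the build lock). A hedged stdlib sketch of that kind of accounting follows; timed_lock and the message wording are illustrative assumptions, not the oslo_concurrency.lockutils implementation.

    # Illustrative "waited X / held Y" lock accounting, stdlib only.
    # timed_lock is a hypothetical helper, not oslo_concurrency.lockutils.
    import contextlib
    import logging
    import threading
    import time

    LOG = logging.getLogger(__name__)
    _locks = {}
    _registry_guard = threading.Lock()

    def _get_lock(name):
        # One shared lock object per name, created lazily.
        with _registry_guard:
            return _locks.setdefault(name, threading.Lock())

    @contextlib.contextmanager
    def timed_lock(name, caller):
        lock = _get_lock(name)
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, caller, waited)
        held_from = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - held_from
            LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs', name, caller, held)

Usage in the spirit of the entries above: with timed_lock("compute_resources", "ResourceTracker.abort_instance_claim"): ... would emit one acquired line and one released line with the measured wait and hold times.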
[ 885.168990] env[62813]: DEBUG oslo.service.loopingcall [None req-cf01784f-8e46-4034-937d-91eac91fde33 tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 885.169278] env[62813]: DEBUG nova.compute.manager [-] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 885.169391] env[62813]: DEBUG nova.network.neutron [-] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 885.190628] env[62813]: DEBUG nova.compute.manager [None req-0cae137f-23f0-434d-84f4-4382b69a7b06 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 704614e0-aaa3-48b6-8208-47af7ca0f367] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.218473] env[62813]: DEBUG nova.network.neutron [-] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.231031] env[62813]: INFO nova.compute.manager [-] [instance: 012bbc43-f61f-4aef-bd66-32fbe66f8374] Took 0.06 seconds to deallocate network for instance. [ 885.236342] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0cae137f-23f0-434d-84f4-4382b69a7b06 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "704614e0-aaa3-48b6-8208-47af7ca0f367" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 247.781s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.253031] env[62813]: DEBUG nova.compute.manager [None req-66590f55-6eda-4198-b9e4-49e3ca294936 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b845396e-1641-4668-b687-348f1ee8b6f0] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.287172] env[62813]: DEBUG nova.compute.manager [None req-66590f55-6eda-4198-b9e4-49e3ca294936 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b845396e-1641-4668-b687-348f1ee8b6f0] Instance disappeared before build. 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.327955] env[62813]: DEBUG oslo_concurrency.lockutils [None req-66590f55-6eda-4198-b9e4-49e3ca294936 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Lock "b845396e-1641-4668-b687-348f1ee8b6f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 246.094s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.356864] env[62813]: DEBUG nova.compute.manager [None req-b3318a6b-df2a-4728-81d8-99b938e8c641 tempest-ServerGroupTestJSON-752837002 tempest-ServerGroupTestJSON-752837002-project-member] [instance: 4e783b92-0668-4e70-9848-4b4320318603] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.396675] env[62813]: DEBUG oslo_concurrency.lockutils [None req-cf01784f-8e46-4034-937d-91eac91fde33 tempest-ServerDiagnosticsTest-667790976 tempest-ServerDiagnosticsTest-667790976-project-member] Lock "012bbc43-f61f-4aef-bd66-32fbe66f8374" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.278s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.409403] env[62813]: DEBUG nova.compute.manager [None req-b3318a6b-df2a-4728-81d8-99b938e8c641 tempest-ServerGroupTestJSON-752837002 tempest-ServerGroupTestJSON-752837002-project-member] [instance: 4e783b92-0668-4e70-9848-4b4320318603] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.434788] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b3318a6b-df2a-4728-81d8-99b938e8c641 tempest-ServerGroupTestJSON-752837002 tempest-ServerGroupTestJSON-752837002-project-member] Lock "4e783b92-0668-4e70-9848-4b4320318603" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 245.524s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.450965] env[62813]: DEBUG nova.compute.manager [None req-09f04951-984c-43e3-8d6f-b5fd6510acb3 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: a7f9eb88-e447-4f59-814e-6ebbb6a2ecb8] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.477774] env[62813]: DEBUG nova.compute.manager [None req-09f04951-984c-43e3-8d6f-b5fd6510acb3 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] [instance: a7f9eb88-e447-4f59-814e-6ebbb6a2ecb8] Instance disappeared before build. 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.502397] env[62813]: DEBUG oslo_concurrency.lockutils [None req-09f04951-984c-43e3-8d6f-b5fd6510acb3 tempest-DeleteServersAdminTestJSON-348383033 tempest-DeleteServersAdminTestJSON-348383033-project-member] Lock "a7f9eb88-e447-4f59-814e-6ebbb6a2ecb8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.865s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.515352] env[62813]: DEBUG nova.compute.manager [None req-394d360c-2cce-4de9-a54b-a742cb2d6faf tempest-ServersAdminNegativeTestJSON-1375939108 tempest-ServersAdminNegativeTestJSON-1375939108-project-member] [instance: 51a8bec7-c8fd-4efb-ad91-eb34bb40a8fe] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.543704] env[62813]: DEBUG nova.compute.manager [None req-394d360c-2cce-4de9-a54b-a742cb2d6faf tempest-ServersAdminNegativeTestJSON-1375939108 tempest-ServersAdminNegativeTestJSON-1375939108-project-member] [instance: 51a8bec7-c8fd-4efb-ad91-eb34bb40a8fe] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.573044] env[62813]: DEBUG oslo_concurrency.lockutils [None req-394d360c-2cce-4de9-a54b-a742cb2d6faf tempest-ServersAdminNegativeTestJSON-1375939108 tempest-ServersAdminNegativeTestJSON-1375939108-project-member] Lock "51a8bec7-c8fd-4efb-ad91-eb34bb40a8fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.148s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.586175] env[62813]: DEBUG nova.compute.manager [None req-be42db35-1275-4746-aa01-dc7440d88d06 tempest-ServersV294TestFqdnHostnames-788616215 tempest-ServersV294TestFqdnHostnames-788616215-project-member] [instance: 097bae8e-614d-4322-b767-d56e0dc1b658] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.625301] env[62813]: DEBUG nova.compute.manager [None req-be42db35-1275-4746-aa01-dc7440d88d06 tempest-ServersV294TestFqdnHostnames-788616215 tempest-ServersV294TestFqdnHostnames-788616215-project-member] [instance: 097bae8e-614d-4322-b767-d56e0dc1b658] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.657601] env[62813]: DEBUG oslo_concurrency.lockutils [None req-be42db35-1275-4746-aa01-dc7440d88d06 tempest-ServersV294TestFqdnHostnames-788616215 tempest-ServersV294TestFqdnHostnames-788616215-project-member] Lock "097bae8e-614d-4322-b767-d56e0dc1b658" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.485s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.667514] env[62813]: DEBUG nova.compute.manager [None req-2c38552a-fe03-4fdc-9a39-8ca3ee2ca58f tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] [instance: 22549912-2253-42bb-b2d7-8d0512c2a9d7] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.697943] env[62813]: DEBUG nova.compute.manager [None req-2c38552a-fe03-4fdc-9a39-8ca3ee2ca58f tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] [instance: 22549912-2253-42bb-b2d7-8d0512c2a9d7] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.722421] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2c38552a-fe03-4fdc-9a39-8ca3ee2ca58f tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] Lock "22549912-2253-42bb-b2d7-8d0512c2a9d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.720s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.732370] env[62813]: DEBUG nova.compute.manager [None req-e2fe2488-28fd-41f7-968a-a894c48c1428 tempest-ServerDiagnosticsV248Test-429699338 tempest-ServerDiagnosticsV248Test-429699338-project-member] [instance: 7cdd84be-1e0a-4e4a-9e40-b4d589f08914] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.760173] env[62813]: DEBUG nova.compute.manager [None req-e2fe2488-28fd-41f7-968a-a894c48c1428 tempest-ServerDiagnosticsV248Test-429699338 tempest-ServerDiagnosticsV248Test-429699338-project-member] [instance: 7cdd84be-1e0a-4e4a-9e40-b4d589f08914] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.790789] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e2fe2488-28fd-41f7-968a-a894c48c1428 tempest-ServerDiagnosticsV248Test-429699338 tempest-ServerDiagnosticsV248Test-429699338-project-member] Lock "7cdd84be-1e0a-4e4a-9e40-b4d589f08914" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.294s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.800911] env[62813]: DEBUG nova.compute.manager [None req-339dccd5-967e-4e11-925b-b1d62468c6e5 tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] [instance: f863dfb3-98d5-473b-9e41-85984e350070] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.828172] env[62813]: DEBUG nova.compute.manager [None req-339dccd5-967e-4e11-925b-b1d62468c6e5 tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] [instance: f863dfb3-98d5-473b-9e41-85984e350070] Instance disappeared before build. 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.856165] env[62813]: DEBUG oslo_concurrency.lockutils [None req-339dccd5-967e-4e11-925b-b1d62468c6e5 tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Lock "f863dfb3-98d5-473b-9e41-85984e350070" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.250s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.866201] env[62813]: DEBUG nova.compute.manager [None req-3d7f5846-71d8-42dc-8a7c-3ea7a9bb21b6 tempest-ServersAdmin275Test-2144933203 tempest-ServersAdmin275Test-2144933203-project-member] [instance: 952194e3-6318-4ecc-8d48-bda3811c4d49] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.909554] env[62813]: DEBUG nova.compute.manager [None req-3d7f5846-71d8-42dc-8a7c-3ea7a9bb21b6 tempest-ServersAdmin275Test-2144933203 tempest-ServersAdmin275Test-2144933203-project-member] [instance: 952194e3-6318-4ecc-8d48-bda3811c4d49] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.938254] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3d7f5846-71d8-42dc-8a7c-3ea7a9bb21b6 tempest-ServersAdmin275Test-2144933203 tempest-ServersAdmin275Test-2144933203-project-member] Lock "952194e3-6318-4ecc-8d48-bda3811c4d49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.443s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.951746] env[62813]: DEBUG nova.compute.manager [None req-8ddb9483-faed-4843-851d-b9aec164990f tempest-ServerActionsV293TestJSON-132017951 tempest-ServerActionsV293TestJSON-132017951-project-member] [instance: 5b935b76-027b-4b4a-a61b-3e4cc2f36c08] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.999290] env[62813]: DEBUG nova.compute.manager [None req-8ddb9483-faed-4843-851d-b9aec164990f tempest-ServerActionsV293TestJSON-132017951 tempest-ServerActionsV293TestJSON-132017951-project-member] [instance: 5b935b76-027b-4b4a-a61b-3e4cc2f36c08] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 886.026330] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8ddb9483-faed-4843-851d-b9aec164990f tempest-ServerActionsV293TestJSON-132017951 tempest-ServerActionsV293TestJSON-132017951-project-member] Lock "5b935b76-027b-4b4a-a61b-3e4cc2f36c08" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.595s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.046616] env[62813]: DEBUG nova.compute.manager [None req-fcde9f16-cd7a-4130-a398-399d73f669f4 tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 8852d058-a494-47e4-977d-289b5126f7ae] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 886.078583] env[62813]: DEBUG nova.compute.manager [None req-fcde9f16-cd7a-4130-a398-399d73f669f4 tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] [instance: 8852d058-a494-47e4-977d-289b5126f7ae] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 886.106032] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fcde9f16-cd7a-4130-a398-399d73f669f4 tempest-MigrationsAdminTest-1996685324 tempest-MigrationsAdminTest-1996685324-project-member] Lock "8852d058-a494-47e4-977d-289b5126f7ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.107s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.117529] env[62813]: DEBUG nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 886.215142] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.215477] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.218065] env[62813]: INFO nova.compute.claims [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 886.684986] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97bff5e7-a081-4f2c-b15d-cde6319f52bb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.696365] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b1662a-603d-4260-9d8d-fc47d55c6012 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.736304] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3d4a4b-a632-4b4f-8d0a-3577d4d2514b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.750625] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71057708-5a3a-47e0-aab4-fa17141b357c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.770550] 
env[62813]: DEBUG nova.compute.provider_tree [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.782694] env[62813]: DEBUG nova.scheduler.client.report [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 886.808386] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.593s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.809339] env[62813]: DEBUG nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 886.854656] env[62813]: DEBUG nova.compute.utils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 886.856237] env[62813]: DEBUG nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 886.856335] env[62813]: DEBUG nova.network.neutron [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 886.866352] env[62813]: DEBUG nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Start building block device mappings for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 886.947865] env[62813]: DEBUG nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 886.983918] env[62813]: DEBUG nova.virt.hardware [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 886.984281] env[62813]: DEBUG nova.virt.hardware [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 886.984346] env[62813]: DEBUG nova.virt.hardware [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 886.984530] env[62813]: DEBUG nova.virt.hardware [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 886.984676] env[62813]: DEBUG nova.virt.hardware [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 886.984827] env[62813]: DEBUG nova.virt.hardware [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 886.985208] env[62813]: DEBUG nova.virt.hardware [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
886.985428] env[62813]: DEBUG nova.virt.hardware [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 886.985613] env[62813]: DEBUG nova.virt.hardware [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 886.985783] env[62813]: DEBUG nova.virt.hardware [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 886.985975] env[62813]: DEBUG nova.virt.hardware [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 886.987476] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8287b034-291d-4baa-ba78-240e0ee5c773 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.999081] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089c6b67-2695-4405-b3da-9a98cb0e8eff {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.019304] env[62813]: DEBUG nova.policy [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2d35c8be82e4fc9869db5e897120b41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f31911e62f9b45ae85874fccc7a916f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 887.949080] env[62813]: DEBUG nova.network.neutron [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Successfully created port: 1172ef80-0d9a-4778-b02b-1d7668a1d266 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 889.355709] env[62813]: DEBUG nova.network.neutron [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Successfully updated port: 1172ef80-0d9a-4778-b02b-1d7668a1d266 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 889.374747] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 
tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "refresh_cache-99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.375070] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquired lock "refresh_cache-99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.375070] env[62813]: DEBUG nova.network.neutron [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 889.457252] env[62813]: DEBUG nova.network.neutron [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 889.716665] env[62813]: DEBUG nova.network.neutron [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Updating instance_info_cache with network_info: [{"id": "1172ef80-0d9a-4778-b02b-1d7668a1d266", "address": "fa:16:3e:cd:03:c1", "network": {"id": "2cd9a110-b173-4c4d-a5a9-9372ad45917a", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454094257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f31911e62f9b45ae85874fccc7a916f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1172ef80-0d", "ovs_interfaceid": "1172ef80-0d9a-4778-b02b-1d7668a1d266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.740897] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Releasing lock "refresh_cache-99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.741166] env[62813]: DEBUG nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] 
[instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Instance network_info: |[{"id": "1172ef80-0d9a-4778-b02b-1d7668a1d266", "address": "fa:16:3e:cd:03:c1", "network": {"id": "2cd9a110-b173-4c4d-a5a9-9372ad45917a", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454094257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f31911e62f9b45ae85874fccc7a916f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1172ef80-0d", "ovs_interfaceid": "1172ef80-0d9a-4778-b02b-1d7668a1d266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 889.741598] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:03:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1172ef80-0d9a-4778-b02b-1d7668a1d266', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 889.749572] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Creating folder: Project (f31911e62f9b45ae85874fccc7a916f6). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 889.751166] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-41149cb5-7923-42eb-a008-010f45e68c89 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.763102] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Created folder: Project (f31911e62f9b45ae85874fccc7a916f6) in parent group-v840812. [ 889.763102] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Creating folder: Instances. Parent ref: group-v840860. 
{{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 889.763276] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9346b477-8a41-4cc1-8a1e-77a722d1d1ce {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.775106] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Created folder: Instances in parent group-v840860. [ 889.775607] env[62813]: DEBUG oslo.service.loopingcall [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 889.775689] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 889.775860] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d72274d-8b94-4706-936a-a122f370b1c1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.799288] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 889.799288] env[62813]: value = "task-4267633" [ 889.799288] env[62813]: _type = "Task" [ 889.799288] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.807393] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267633, 'name': CreateVM_Task} progress is 0%. 
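Before the CreateVM_Task above was issued, the driver reduced the port's network_info entry (logged a little earlier) to the "Instance VIF info" structure: the integration bridge name, the MAC address, an OpaqueNetwork reference to the NSX logical switch, the Neutron port id, and the vmxnet3 model. The mapping below mirrors the fields visible in this log, but the helper itself is invented for illustration and is not the Nova vmwareapi vif code; in particular, the vif_model really comes from the image metadata and is hard-coded here only to keep the sketch short.

# Sketch only: derive the logged VIF info dict from one network_info entry.
def vif_info_from_network_info(vif, vif_model="vmxnet3"):
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],            # e.g. 'br-int'
        "mac_address": vif["address"],                       # fa:16:3e:cd:03:c1
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],  # NSX switch UUID
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                               # Neutron port id
        "vif_model": vif_model,
    }

Applied to the cached network_info entry shown above, this reproduces the VIF info list that build_virtual_machine logged for instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4.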
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.152756] env[62813]: DEBUG nova.compute.manager [req-13c29c2d-ad10-4436-82a8-f19406ee942c req-63571d77-ee98-4820-96c3-baa662b0aba0 service nova] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Received event network-vif-plugged-1172ef80-0d9a-4778-b02b-1d7668a1d266 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 890.152807] env[62813]: DEBUG oslo_concurrency.lockutils [req-13c29c2d-ad10-4436-82a8-f19406ee942c req-63571d77-ee98-4820-96c3-baa662b0aba0 service nova] Acquiring lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.152983] env[62813]: DEBUG oslo_concurrency.lockutils [req-13c29c2d-ad10-4436-82a8-f19406ee942c req-63571d77-ee98-4820-96c3-baa662b0aba0 service nova] Lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.153774] env[62813]: DEBUG oslo_concurrency.lockutils [req-13c29c2d-ad10-4436-82a8-f19406ee942c req-63571d77-ee98-4820-96c3-baa662b0aba0 service nova] Lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.153982] env[62813]: DEBUG nova.compute.manager [req-13c29c2d-ad10-4436-82a8-f19406ee942c req-63571d77-ee98-4820-96c3-baa662b0aba0 service nova] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] No waiting events found dispatching network-vif-plugged-1172ef80-0d9a-4778-b02b-1d7668a1d266 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 890.154215] env[62813]: WARNING nova.compute.manager [req-13c29c2d-ad10-4436-82a8-f19406ee942c req-63571d77-ee98-4820-96c3-baa662b0aba0 service nova] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Received unexpected event network-vif-plugged-1172ef80-0d9a-4778-b02b-1d7668a1d266 for instance with vm_state building and task_state spawning. [ 890.288261] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Acquiring lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.288261] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.313568] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267633, 'name': CreateVM_Task, 'duration_secs': 0.382636} completed successfully. 
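A pattern that recurs throughout this log is the lock accounting emitted by oslo_concurrency.lockutils: an "Acquiring lock ... by ..." line, then "acquired ... :: waited Ns", then '"released" ... :: held Ns', as seen just above for the per-instance events lock. The context manager below is only a stand-in that reproduces that bookkeeping shape; it is not the lockutils implementation.

# Sketch only: a named lock whose acquire/release paths report how long the
# caller waited for the lock and how long it was held.
import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()


@contextmanager
def timed_lock(name, owner):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

Usage would look like: with timed_lock("99a760f5-...-events", "_pop_event"): pop the pending event. The waited/held figures are what make contention (or a long-running critical section, such as the 3.3s session creation in the preamble) visible directly in the log.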
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.313756] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 890.314525] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.314698] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.315048] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 890.315319] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c906891d-08da-4de2-b59f-8a5d19ad251b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.321114] env[62813]: DEBUG oslo_vmware.api [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Waiting for the task: (returnval){ [ 890.321114] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52193a14-feae-7146-95df-1412a64de311" [ 890.321114] env[62813]: _type = "Task" [ 890.321114] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.331032] env[62813]: DEBUG oslo_vmware.api [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52193a14-feae-7146-95df-1412a64de311, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.836343] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.836343] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 890.837512] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.142832] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 892.163545] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 892.163777] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 892.322017] env[62813]: DEBUG nova.compute.manager [req-d8b66519-9f14-4864-b953-0d4bba6a9052 req-235a129f-0750-4f09-8429-7a24b8c06590 service nova] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Received event network-changed-1172ef80-0d9a-4778-b02b-1d7668a1d266 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 892.322017] env[62813]: DEBUG nova.compute.manager [req-d8b66519-9f14-4864-b953-0d4bba6a9052 req-235a129f-0750-4f09-8429-7a24b8c06590 service nova] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Refreshing instance network info cache due to event network-changed-1172ef80-0d9a-4778-b02b-1d7668a1d266. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 892.322017] env[62813]: DEBUG oslo_concurrency.lockutils [req-d8b66519-9f14-4864-b953-0d4bba6a9052 req-235a129f-0750-4f09-8429-7a24b8c06590 service nova] Acquiring lock "refresh_cache-99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.322639] env[62813]: DEBUG oslo_concurrency.lockutils [req-d8b66519-9f14-4864-b953-0d4bba6a9052 req-235a129f-0750-4f09-8429-7a24b8c06590 service nova] Acquired lock "refresh_cache-99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.323093] env[62813]: DEBUG nova.network.neutron [req-d8b66519-9f14-4864-b953-0d4bba6a9052 req-235a129f-0750-4f09-8429-7a24b8c06590 service nova] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Refreshing network info cache for port 1172ef80-0d9a-4778-b02b-1d7668a1d266 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 893.109446] env[62813]: DEBUG nova.network.neutron [req-d8b66519-9f14-4864-b953-0d4bba6a9052 req-235a129f-0750-4f09-8429-7a24b8c06590 service nova] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Updated VIF entry in instance network info cache for port 1172ef80-0d9a-4778-b02b-1d7668a1d266. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 893.109793] env[62813]: DEBUG nova.network.neutron [req-d8b66519-9f14-4864-b953-0d4bba6a9052 req-235a129f-0750-4f09-8429-7a24b8c06590 service nova] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Updating instance_info_cache with network_info: [{"id": "1172ef80-0d9a-4778-b02b-1d7668a1d266", "address": "fa:16:3e:cd:03:c1", "network": {"id": "2cd9a110-b173-4c4d-a5a9-9372ad45917a", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454094257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f31911e62f9b45ae85874fccc7a916f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1172ef80-0d", "ovs_interfaceid": "1172ef80-0d9a-4778-b02b-1d7668a1d266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.122115] env[62813]: DEBUG oslo_concurrency.lockutils [req-d8b66519-9f14-4864-b953-0d4bba6a9052 req-235a129f-0750-4f09-8429-7a24b8c06590 service nova] Releasing lock "refresh_cache-99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.164552] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.164552] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 894.164127] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.159220] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.160730] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.191466] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.191641] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 895.193692] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 895.221844] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 895.222407] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 895.222407] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 895.222407] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 895.222407] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 895.222567] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 895.222567] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 895.222658] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 895.223033] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 895.223033] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 895.223033] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
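The periodic _heal_instance_info_cache pass above rebuilds its candidate list and then skips every instance that is still Building, which is why it ends with nothing to refresh. A minimal sketch of that filtering step, with invented names rather than Nova's actual helper:

# Sketch only: skip instances whose spawn path is still populating the
# network info cache, then report when nothing was left to heal.
def instances_to_heal(instances):
    """Yield instances whose network info cache should be refreshed."""
    for inst in instances:
        if inst.vm_state == "building":
            # The spawn path is still filling in this instance's network
            # info (the "Skipping ... because it is Building." lines above).
            continue
        yield inst


def heal_instance_info_cache(instances, refresh):
    healed_any = False
    for inst in instances_to_heal(instances):
        refresh(inst)          # e.g. rebuild the port cache from Neutron
        healed_any = True
    if not healed_any:
        print("Didn't find any instances for network info cache update.")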
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 895.223542] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 897.026236] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c50224e8-5bc1-465f-a52f-f33fcfe4adc0 tempest-ListImageFiltersTestJSON-2088619778 tempest-ListImageFiltersTestJSON-2088619778-project-member] Acquiring lock "e9e201ea-9561-483b-a39e-6180fc6f5a2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.026533] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c50224e8-5bc1-465f-a52f-f33fcfe4adc0 tempest-ListImageFiltersTestJSON-2088619778 tempest-ListImageFiltersTestJSON-2088619778-project-member] Lock "e9e201ea-9561-483b-a39e-6180fc6f5a2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.732169] env[62813]: DEBUG oslo_concurrency.lockutils [None req-de566c29-5c77-4c6f-aa2b-419069f5eaa9 tempest-ListImageFiltersTestJSON-2088619778 tempest-ListImageFiltersTestJSON-2088619778-project-member] Acquiring lock "287a3859-9150-414f-a4ef-2ba3af8edc8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.732169] env[62813]: DEBUG oslo_concurrency.lockutils [None req-de566c29-5c77-4c6f-aa2b-419069f5eaa9 tempest-ListImageFiltersTestJSON-2088619778 tempest-ListImageFiltersTestJSON-2088619778-project-member] Lock "287a3859-9150-414f-a4ef-2ba3af8edc8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.164618] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.180941] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.181253] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.181671] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.181861] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 898.186310] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b5a39b-6a74-4940-8029-33d6be9ade8b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.194411] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7682f9dc-55df-433c-a350-9187eda59527 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.212603] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b579d75b-8ea4-4573-bf1e-5874c351aa7e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.220691] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a73c5b-f913-4a8c-99a4-0c3476f69eb9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.255875] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180764MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 898.256055] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.256277] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.355672] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 65d620ad-0863-4947-945e-0e4b3c01d3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 898.355672] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 09aa702f-a28c-429b-83d9-378be8606a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 898.355672] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 9a448d2b-0dee-4a90-b131-e6ada542f342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 898.355672] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance aa76585b-55a8-437c-8dea-7731d85a3b82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 898.355672] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 898.356039] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 76b0e03d-9636-4328-bfd5-17c434cfae72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 898.356039] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 356088a2-b55e-4ff1-9422-a53ab6830fc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 898.356039] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 489b821e-f7d0-446f-8197-550c808e5a99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 898.356183] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b946bdda-a8a4-4a82-b2f7-99637fcae21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 898.356238] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 898.378270] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9b7bace-d76a-4dd8-8283-b56fd86a77a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.392447] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.403591] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 40655a50-5c68-4141-be93-f7a39aa5a168 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.415976] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.431538] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 6ed29c0d-710c-4f2f-b321-bbd8d253f918 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.450453] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 90f047bb-b344-445b-906e-ca8efedf6f60 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.461968] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 419ed7f3-e302-4b62-965c-d12dc88ff2c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.475535] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 69c52191-e779-4bb4-b3aa-f39c8a70450b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.487060] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f424eed0-7af3-45e7-b451-ddd6c23871f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.497541] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 989c0e58-b997-44c5-bc4f-759a30fbbfe3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.509929] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance ea2f91dc-1762-420a-90dc-c2e32811d911 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.525307] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a564338e-81c2-4b17-a507-081f6a57e190 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.539299] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance eec98a4d-34f4-4313-8f9c-2fe9f483959c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.554409] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e9e201ea-9561-483b-a39e-6180fc6f5a2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.571773] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 287a3859-9150-414f-a4ef-2ba3af8edc8b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 898.572154] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 898.573128] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 898.972619] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e85015-bc74-47af-9087-6cdfc0f7e867 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.981580] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f1c9f7-0668-4825-9ac8-d90d633df597 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.013721] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebad8c88-a188-4335-82fb-83d68277d883 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.021821] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287cdcb4-7b4d-4246-9e05-b624540839ff {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.037628] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.045971] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 899.064043] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 899.064043] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.807s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.872164] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4c5a96f3-ab92-4b2f-99ba-b662823f7abb tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Acquiring lock "081b2d72-3954-4ce8-b8dd-6b4d6c1eea9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.872164] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4c5a96f3-ab92-4b2f-99ba-b662823f7abb tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Lock "081b2d72-3954-4ce8-b8dd-6b4d6c1eea9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.557430] env[62813]: WARNING oslo_vmware.rw_handles [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 930.557430] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 930.557430] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 930.557430] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 930.557430] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 930.557430] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 930.557430] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 930.557430] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 930.557430] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 930.557430] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 930.557430] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 930.557430] env[62813]: ERROR oslo_vmware.rw_handles [ 930.558222] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/32ed2d68-0f8d-440c-9c03-2391e603f0e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 930.559746] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 
tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 930.559997] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Copying Virtual Disk [datastore2] vmware_temp/32ed2d68-0f8d-440c-9c03-2391e603f0e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/32ed2d68-0f8d-440c-9c03-2391e603f0e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 930.560332] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-54ea3d33-2c9b-43be-9b10-910f8c3323c8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.571600] env[62813]: DEBUG oslo_vmware.api [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Waiting for the task: (returnval){ [ 930.571600] env[62813]: value = "task-4267634" [ 930.571600] env[62813]: _type = "Task" [ 930.571600] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.580501] env[62813]: DEBUG oslo_vmware.api [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Task: {'id': task-4267634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.082636] env[62813]: DEBUG oslo_vmware.exceptions [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 931.082930] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.083516] env[62813]: ERROR nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 931.083516] env[62813]: Faults: ['InvalidArgument'] [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Traceback (most recent call last): [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] yield resources [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] self.driver.spawn(context, instance, image_meta, [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] self._fetch_image_if_missing(context, vi) [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] image_cache(vi, tmp_image_ds_loc) [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] vm_util.copy_virtual_disk( [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] session._wait_for_task(vmdk_copy_task) [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] return self.wait_for_task(task_ref) [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] return evt.wait() [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] result = hub.switch() [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] return self.greenlet.switch() [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] self.f(*self.args, **self.kw) [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] raise exceptions.translate_fault(task_info.error) [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Faults: ['InvalidArgument'] [ 931.083516] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] [ 931.084645] env[62813]: INFO nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Terminating instance [ 931.085506] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.085740] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 931.085989] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c16ad1da-0cb4-47c7-9e91-1649ccb324b2 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.088302] env[62813]: DEBUG nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 931.088538] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 931.089326] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd46a32e-f06c-41a9-8f2a-7ee611e8499d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.096556] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 931.096872] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb7c0500-7634-4b77-ad35-9ac4d7d3d158 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.099231] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 931.099411] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 931.100427] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02a5b732-13e5-465c-9845-41ffff9b535f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.105528] env[62813]: DEBUG oslo_vmware.api [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Waiting for the task: (returnval){ [ 931.105528] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52ba2944-3179-6916-6168-2bc3a969c20b" [ 931.105528] env[62813]: _type = "Task" [ 931.105528] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.113340] env[62813]: DEBUG oslo_vmware.api [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52ba2944-3179-6916-6168-2bc3a969c20b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.258715] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 931.259093] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 931.259415] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Deleting the datastore file [datastore2] 65d620ad-0863-4947-945e-0e4b3c01d3a3 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 931.259771] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7f7dae6-036d-4739-91ab-7237e62343ed {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.268185] env[62813]: DEBUG oslo_vmware.api [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Waiting for the task: (returnval){ [ 931.268185] env[62813]: value = "task-4267636" [ 931.268185] env[62813]: _type = "Task" [ 931.268185] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.277239] env[62813]: DEBUG oslo_vmware.api [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Task: {'id': task-4267636, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.618049] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 931.618049] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Creating directory with path [datastore2] vmware_temp/a9654a50-f149-4139-b62f-39b3782f9be9/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 931.618049] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d20c418-b567-4ed2-a83d-48013483d655 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.630222] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Created directory with path [datastore2] vmware_temp/a9654a50-f149-4139-b62f-39b3782f9be9/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 931.630483] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Fetch image to [datastore2] vmware_temp/a9654a50-f149-4139-b62f-39b3782f9be9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 931.630666] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/a9654a50-f149-4139-b62f-39b3782f9be9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 931.631472] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0cd086e-2668-4675-ae7c-7ed0f91cc77f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.638640] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158eb916-e5ed-4fcf-8986-31b03b33e9b0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.648763] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd787de9-8112-4ebf-8a4b-6ffd6eebcd56 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.682055] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-33f10e68-4e1d-4589-84af-25b7ef9d6f26 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.687915] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1b6abfaf-89c5-4838-843a-530839ebe966 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.710232] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 931.768243] env[62813]: DEBUG oslo_vmware.rw_handles [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a9654a50-f149-4139-b62f-39b3782f9be9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 931.831922] env[62813]: DEBUG oslo_vmware.api [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Task: {'id': task-4267636, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102019} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.832870] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 931.833083] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 931.833270] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 931.833452] env[62813]: INFO nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Took 0.74 seconds to destroy the instance on the hypervisor. [ 931.835308] env[62813]: DEBUG oslo_vmware.rw_handles [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Completed reading data from the image iterator. 
{{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 931.835479] env[62813]: DEBUG oslo_vmware.rw_handles [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a9654a50-f149-4139-b62f-39b3782f9be9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 931.836087] env[62813]: DEBUG nova.compute.claims [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 931.836284] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.836509] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.237294] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4cd392-343f-41dd-afc7-3e1f33d825c2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.245442] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7e2eda-ead1-49a0-bef4-a2261b256612 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.276352] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246a9f55-7070-449c-b18e-32789ff30cde {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.284347] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437b2009-5123-4c26-ba02-18766fbde005 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.298208] env[62813]: DEBUG nova.compute.provider_tree [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.307996] env[62813]: DEBUG nova.scheduler.client.report [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 932.322699] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.486s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.323289] env[62813]: ERROR nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 932.323289] env[62813]: Faults: ['InvalidArgument'] [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Traceback (most recent call last): [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] self.driver.spawn(context, instance, image_meta, [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] self._fetch_image_if_missing(context, vi) [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] image_cache(vi, tmp_image_ds_loc) [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] vm_util.copy_virtual_disk( [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] session._wait_for_task(vmdk_copy_task) [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] return self.wait_for_task(task_ref) [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] return evt.wait() [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] result = hub.switch() [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] return self.greenlet.switch() [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] self.f(*self.args, **self.kw) [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] raise exceptions.translate_fault(task_info.error) [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Faults: ['InvalidArgument'] [ 932.323289] env[62813]: ERROR nova.compute.manager [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] [ 932.324321] env[62813]: DEBUG nova.compute.utils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 932.325813] env[62813]: DEBUG nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Build of instance 65d620ad-0863-4947-945e-0e4b3c01d3a3 was re-scheduled: A specified parameter was not correct: fileType [ 932.325813] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 932.326237] env[62813]: DEBUG nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 932.326439] 
env[62813]: DEBUG nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 932.326610] env[62813]: DEBUG nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 932.326796] env[62813]: DEBUG nova.network.neutron [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 933.719519] env[62813]: DEBUG nova.network.neutron [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.735780] env[62813]: INFO nova.compute.manager [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Took 1.41 seconds to deallocate network for instance. [ 933.840547] env[62813]: INFO nova.scheduler.client.report [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Deleted allocations for instance 65d620ad-0863-4947-945e-0e4b3c01d3a3 [ 933.861667] env[62813]: DEBUG oslo_concurrency.lockutils [None req-78248832-78c0-4940-91b7-beb1458cdb8f tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "65d620ad-0863-4947-945e-0e4b3c01d3a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 350.004s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.863177] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f761a8a2-5868-4f15-889f-6c65de75e2c6 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "65d620ad-0863-4947-945e-0e4b3c01d3a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 148.514s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.866210] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f761a8a2-5868-4f15-889f-6c65de75e2c6 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "65d620ad-0863-4947-945e-0e4b3c01d3a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.866210] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f761a8a2-5868-4f15-889f-6c65de75e2c6 
tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "65d620ad-0863-4947-945e-0e4b3c01d3a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.866210] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f761a8a2-5868-4f15-889f-6c65de75e2c6 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "65d620ad-0863-4947-945e-0e4b3c01d3a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.868054] env[62813]: INFO nova.compute.manager [None req-f761a8a2-5868-4f15-889f-6c65de75e2c6 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Terminating instance [ 933.872610] env[62813]: DEBUG nova.compute.manager [None req-f761a8a2-5868-4f15-889f-6c65de75e2c6 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 933.872610] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-f761a8a2-5868-4f15-889f-6c65de75e2c6 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 933.872610] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2df60256-beb9-48eb-94bb-7677b62088c4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.882041] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27505729-548f-4c28-9273-bbbb78d35ce4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.895915] env[62813]: DEBUG nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 933.917902] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-f761a8a2-5868-4f15-889f-6c65de75e2c6 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 65d620ad-0863-4947-945e-0e4b3c01d3a3 could not be found. 
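For reference: the spawn failure recorded above leaves the build via oslo_vmware.exceptions.VimFaultException (fault_list ['InvalidArgument'], message "A specified parameter was not correct: fileType") raised out of session._wait_for_task(), after which the resource claim is aborted and the instance is rescheduled. A minimal sketch of that error path, assuming `session` is an oslo_vmware.api.VMwareAPISession as in the traceback; the helper name is illustrative, not Nova's:

    from oslo_vmware import exceptions as vexc

    def wait_for_copy(session, vmdk_copy_task):
        """Sketch: wait on a vCenter CopyVirtualDisk task and classify its fault."""
        try:
            # Blocks until the server-side task finishes; a task fault is
            # translated into an exception, exactly as in the traceback above.
            return session.wait_for_task(vmdk_copy_task)
        except vexc.VimFaultException as exc:
            if 'InvalidArgument' in exc.fault_list:
                # This is the fault seen above ("A specified parameter was not
                # correct: fileType"); Nova lets it propagate, aborts the
                # resource claim and reschedules the build.
                raise
            # Any other vCenter fault is equally fatal for the disk copy.
            raise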
[ 933.918154] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-f761a8a2-5868-4f15-889f-6c65de75e2c6 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 933.918344] env[62813]: INFO nova.compute.manager [None req-f761a8a2-5868-4f15-889f-6c65de75e2c6 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 933.918625] env[62813]: DEBUG oslo.service.loopingcall [None req-f761a8a2-5868-4f15-889f-6c65de75e2c6 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 933.918861] env[62813]: DEBUG nova.compute.manager [-] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 933.918959] env[62813]: DEBUG nova.network.neutron [-] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 933.955515] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.955853] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.957328] env[62813]: INFO nova.compute.claims [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 933.961775] env[62813]: DEBUG nova.network.neutron [-] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.972254] env[62813]: INFO nova.compute.manager [-] [instance: 65d620ad-0863-4947-945e-0e4b3c01d3a3] Took 0.05 seconds to deallocate network for instance. 
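The "Acquiring lock ... / Lock ... acquired ... waited / ... "released" ... held" triples that recur above come from oslo.concurrency's lockutils wrappers around the resource tracker and image-cache code paths. A minimal sketch of the two usual shapes, with illustrative function names; the lock names "compute_resources" and the datastore .vmdk path are the ones actually visible in this log:

    from oslo_concurrency import lockutils

    # Decorator form: serializes callers on an in-process lock and emits the
    # "acquired ... waited" / "released ... held" DEBUG lines seen above.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        # resource-tracker bookkeeping would run inside the lock
        return instance_uuid

    # Context-manager form: matches the Acquiring/Acquired/Releasing messages
    # around the image-cache vmdk path.
    def with_image_cache_lock(ds_path):
        with lockutils.lock(ds_path):
            # critical section protecting the cached image on the datastore
            return ds_path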
[ 934.084063] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f761a8a2-5868-4f15-889f-6c65de75e2c6 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "65d620ad-0863-4947-945e-0e4b3c01d3a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.221s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.366384] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b233002-2f60-4cd2-86de-ab60b3e3f86e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.374196] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c85a54-7ee0-4cdf-b6d1-a98254399436 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.404159] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfcf73c9-2b48-47db-8300-6746b5842ba9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.412249] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9ffbc5-461a-47ff-beb5-a23717269dde {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.425797] env[62813]: DEBUG nova.compute.provider_tree [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.435056] env[62813]: DEBUG nova.scheduler.client.report [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 934.450645] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.495s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.451200] env[62813]: DEBUG nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Start building networks asynchronously for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 934.490132] env[62813]: DEBUG nova.compute.utils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 934.492409] env[62813]: DEBUG nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 934.492409] env[62813]: DEBUG nova.network.neutron [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 934.501158] env[62813]: DEBUG nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 934.564755] env[62813]: DEBUG nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 934.593752] env[62813]: DEBUG nova.virt.hardware [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 934.594015] env[62813]: DEBUG nova.virt.hardware [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 934.594189] env[62813]: DEBUG nova.virt.hardware [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.594376] env[62813]: DEBUG nova.virt.hardware [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 934.594529] env[62813]: DEBUG nova.virt.hardware [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 934.594678] env[62813]: DEBUG nova.virt.hardware [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 934.594892] env[62813]: DEBUG nova.virt.hardware [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 934.595067] env[62813]: DEBUG nova.virt.hardware [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 934.595246] 
env[62813]: DEBUG nova.virt.hardware [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 934.595409] env[62813]: DEBUG nova.virt.hardware [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 934.595582] env[62813]: DEBUG nova.virt.hardware [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 934.596496] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194e6024-8dd6-4cff-b5b9-a65f5cdc9509 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.600505] env[62813]: DEBUG nova.policy [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6b68bedf0337419bb393071f19ce60db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '153e3527d4cf437ba34513b8edb5485f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 934.607751] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0885711a-ff3c-4f71-8be4-2f3fcc9d80c5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.134344] env[62813]: DEBUG nova.network.neutron [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Successfully created port: 23e58488-ef49-4087-a0c3-ac18bac08495 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 936.670345] env[62813]: DEBUG nova.network.neutron [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Successfully updated port: 23e58488-ef49-4087-a0c3-ac18bac08495 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 936.687899] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Acquiring lock "refresh_cache-c9b7bace-d76a-4dd8-8283-b56fd86a77a4" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.687899] env[62813]: DEBUG oslo_concurrency.lockutils [None 
req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Acquired lock "refresh_cache-c9b7bace-d76a-4dd8-8283-b56fd86a77a4" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.687899] env[62813]: DEBUG nova.network.neutron [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 936.767854] env[62813]: DEBUG nova.network.neutron [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 937.162105] env[62813]: DEBUG nova.network.neutron [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Updating instance_info_cache with network_info: [{"id": "23e58488-ef49-4087-a0c3-ac18bac08495", "address": "fa:16:3e:29:97:fd", "network": {"id": "4f3521a7-3638-43cc-b306-5247dd3369c4", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1774302930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "153e3527d4cf437ba34513b8edb5485f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e58488-ef", "ovs_interfaceid": "23e58488-ef49-4087-a0c3-ac18bac08495", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.178683] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Releasing lock "refresh_cache-c9b7bace-d76a-4dd8-8283-b56fd86a77a4" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.178983] env[62813]: DEBUG nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Instance network_info: |[{"id": "23e58488-ef49-4087-a0c3-ac18bac08495", "address": "fa:16:3e:29:97:fd", "network": {"id": "4f3521a7-3638-43cc-b306-5247dd3369c4", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1774302930-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "153e3527d4cf437ba34513b8edb5485f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e58488-ef", "ovs_interfaceid": "23e58488-ef49-4087-a0c3-ac18bac08495", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 937.180027] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:97:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd829efb7-e98e-4b67-bd03-b0888287dbfd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23e58488-ef49-4087-a0c3-ac18bac08495', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 937.187673] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Creating folder: Project (153e3527d4cf437ba34513b8edb5485f). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 937.188415] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e77cff21-c567-4e76-a8e3-3b3c74b70567 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.202143] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Created folder: Project (153e3527d4cf437ba34513b8edb5485f) in parent group-v840812. [ 937.202143] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Creating folder: Instances. Parent ref: group-v840863. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 937.202143] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-062092bd-3acf-4c89-ab60-aff9c356c57f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.211455] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Created folder: Instances in parent group-v840863. 
[ 937.211750] env[62813]: DEBUG oslo.service.loopingcall [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 937.211981] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 937.212351] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86a8274e-3486-4419-9e44-cbb05d63a6fa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.237952] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 937.237952] env[62813]: value = "task-4267639" [ 937.237952] env[62813]: _type = "Task" [ 937.237952] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.246432] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267639, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.750072] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267639, 'name': CreateVM_Task, 'duration_secs': 0.326349} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.750506] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 937.751221] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.751390] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.751731] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 937.751993] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a8c3dbc-a453-40aa-be28-ee2179e2e9d9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.758251] env[62813]: DEBUG oslo_vmware.api [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 
tempest-AttachInterfacesV270Test-1041329423-project-member] Waiting for the task: (returnval){ [ 937.758251] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52b21bdf-a163-ea66-73d9-bf922a73b558" [ 937.758251] env[62813]: _type = "Task" [ 937.758251] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.770020] env[62813]: DEBUG oslo_vmware.api [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52b21bdf-a163-ea66-73d9-bf922a73b558, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.808080] env[62813]: DEBUG nova.compute.manager [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Received event network-vif-plugged-23e58488-ef49-4087-a0c3-ac18bac08495 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 937.809435] env[62813]: DEBUG oslo_concurrency.lockutils [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] Acquiring lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.809435] env[62813]: DEBUG oslo_concurrency.lockutils [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] Lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.809435] env[62813]: DEBUG oslo_concurrency.lockutils [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] Lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.809435] env[62813]: DEBUG nova.compute.manager [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] No waiting events found dispatching network-vif-plugged-23e58488-ef49-4087-a0c3-ac18bac08495 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 937.809435] env[62813]: WARNING nova.compute.manager [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Received unexpected event network-vif-plugged-23e58488-ef49-4087-a0c3-ac18bac08495 for instance with vm_state building and task_state spawning. 
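The "Waiting for the task ... progress is 0% ... completed successfully" sequences around CreateVM_Task and SearchDatastore_Task come from oslo.vmware polling the vCenter task object on a fixed interval. A simplified stand-in for that pattern, built on oslo.service's looping call; check_task() is a hypothetical placeholder for the real property-collector read, not part of oslo.vmware's API:

    from oslo_service import loopingcall

    def wait_for_task(check_task, interval=0.5):
        """Sketch: poll a vCenter-style task until it reports success or error.

        check_task() should return an object with .state in
        ('queued', 'running', 'success', 'error') and, on failure, .error.
        """
        def _poll():
            info = check_task()
            if info.state == 'success':
                # Stop the loop and hand the task info back to the caller.
                raise loopingcall.LoopingCallDone(info)
            if info.state == 'error':
                # The real code translates the vCenter fault; a plain exception
                # is enough for this sketch.
                raise RuntimeError('task failed: %s' % info.error)
            # Otherwise keep polling; the "progress is N%" lines above are
            # logged once per pass in the real implementation.

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()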
[ 937.810253] env[62813]: DEBUG nova.compute.manager [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Received event network-changed-23e58488-ef49-4087-a0c3-ac18bac08495 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 937.810253] env[62813]: DEBUG nova.compute.manager [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Refreshing instance network info cache due to event network-changed-23e58488-ef49-4087-a0c3-ac18bac08495. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 937.810253] env[62813]: DEBUG oslo_concurrency.lockutils [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] Acquiring lock "refresh_cache-c9b7bace-d76a-4dd8-8283-b56fd86a77a4" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.810253] env[62813]: DEBUG oslo_concurrency.lockutils [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] Acquired lock "refresh_cache-c9b7bace-d76a-4dd8-8283-b56fd86a77a4" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.810253] env[62813]: DEBUG nova.network.neutron [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Refreshing network info cache for port 23e58488-ef49-4087-a0c3-ac18bac08495 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 938.269633] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.269968] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 938.270207] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.431667] env[62813]: DEBUG nova.network.neutron [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Updated VIF entry in instance network info cache for port 23e58488-ef49-4087-a0c3-ac18bac08495. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 938.431953] env[62813]: DEBUG nova.network.neutron [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Updating instance_info_cache with network_info: [{"id": "23e58488-ef49-4087-a0c3-ac18bac08495", "address": "fa:16:3e:29:97:fd", "network": {"id": "4f3521a7-3638-43cc-b306-5247dd3369c4", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1774302930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "153e3527d4cf437ba34513b8edb5485f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23e58488-ef", "ovs_interfaceid": "23e58488-ef49-4087-a0c3-ac18bac08495", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.445477] env[62813]: DEBUG oslo_concurrency.lockutils [req-704b3d73-964b-4128-9e2d-84b4c8a73615 req-e5c459ab-a5fa-499f-8c02-30df6efba6a5 service nova] Releasing lock "refresh_cache-c9b7bace-d76a-4dd8-8283-b56fd86a77a4" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.026094] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.026094] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.281256] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b695ba0a-48af-4fb8-af68-cd1f5f71398d tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.063401] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 953.164131] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 953.164377] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 953.164543] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 953.164691] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 955.163583] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.164586] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.164918] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 956.164918] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 956.187088] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 956.187437] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 956.187437] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 956.187555] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 956.187650] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 956.187754] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 956.187875] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 956.187994] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 956.188133] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 956.188253] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 956.188374] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 957.164247] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.164533] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 959.164117] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 959.178313] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.178569] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.178746] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.178908] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 959.180223] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12b7f03-ed14-4904-abcf-e69a67b0c84b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.189218] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f9c7bd-1974-4cc2-9190-4f5e795b57d8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.204215] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2327fe7-de9e-412d-bacc-98c88fadcc78 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.211273] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bace572a-3edb-4e51-9872-d8f9c64eacc6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.242063] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180746MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 959.242242] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.242441] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.327892] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 09aa702f-a28c-429b-83d9-378be8606a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 959.328093] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 9a448d2b-0dee-4a90-b131-e6ada542f342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 959.328229] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance aa76585b-55a8-437c-8dea-7731d85a3b82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 959.328355] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 959.328480] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 76b0e03d-9636-4328-bfd5-17c434cfae72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 959.328601] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 356088a2-b55e-4ff1-9422-a53ab6830fc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 959.328719] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 489b821e-f7d0-446f-8197-550c808e5a99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 959.328838] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b946bdda-a8a4-4a82-b2f7-99637fcae21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 959.328957] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 959.329096] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9b7bace-d76a-4dd8-8283-b56fd86a77a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 959.340848] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.351943] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 40655a50-5c68-4141-be93-f7a39aa5a168 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.362175] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.373430] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 6ed29c0d-710c-4f2f-b321-bbd8d253f918 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.384945] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 90f047bb-b344-445b-906e-ca8efedf6f60 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.394598] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 419ed7f3-e302-4b62-965c-d12dc88ff2c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.407112] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 69c52191-e779-4bb4-b3aa-f39c8a70450b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.417493] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f424eed0-7af3-45e7-b451-ddd6c23871f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.428926] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 989c0e58-b997-44c5-bc4f-759a30fbbfe3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.439193] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance ea2f91dc-1762-420a-90dc-c2e32811d911 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.452657] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a564338e-81c2-4b17-a507-081f6a57e190 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.462928] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance eec98a4d-34f4-4313-8f9c-2fe9f483959c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.473516] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e9e201ea-9561-483b-a39e-6180fc6f5a2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.483732] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 287a3859-9150-414f-a4ef-2ba3af8edc8b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.494532] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 081b2d72-3954-4ce8-b8dd-6b4d6c1eea9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.506568] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d79298e6-bb55-4ba6-9a68-e460c8953c7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 959.508094] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 959.508094] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 959.842866] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b73a6e-7034-4e10-a4da-be12ad4f8110 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.851252] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8872884-69b6-4604-bffd-768631201764 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.881930] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4074d25-d8d0-4f50-8862-df9184cf1716 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.891400] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ecec670-ddc8-4d92-a0f0-34a4be0db251 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.905358] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.915402] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 959.932656] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 959.932869] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.690s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.145055] env[62813]: WARNING oslo_vmware.rw_handles [None 
req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 979.145055] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 979.145055] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 979.145055] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 979.145055] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 979.145055] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 979.145055] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 979.145055] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 979.145055] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 979.145055] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 979.145055] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 979.145055] env[62813]: ERROR oslo_vmware.rw_handles [ 979.145055] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/a9654a50-f149-4139-b62f-39b3782f9be9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 979.146977] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 979.147300] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Copying Virtual Disk [datastore2] vmware_temp/a9654a50-f149-4139-b62f-39b3782f9be9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/a9654a50-f149-4139-b62f-39b3782f9be9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 979.147647] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2027b907-b4ec-4e0c-b510-7c1fc688d1c3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.157471] env[62813]: DEBUG oslo_vmware.api [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Waiting for the task: (returnval){ [ 979.157471] env[62813]: value = "task-4267640" [ 979.157471] env[62813]: _type = "Task" 
[ 979.157471] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.166126] env[62813]: DEBUG oslo_vmware.api [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Task: {'id': task-4267640, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.667709] env[62813]: DEBUG oslo_vmware.exceptions [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 979.667996] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.668576] env[62813]: ERROR nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 979.668576] env[62813]: Faults: ['InvalidArgument'] [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Traceback (most recent call last): [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] yield resources [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] self.driver.spawn(context, instance, image_meta, [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] self._vmops.spawn(context, instance, image_meta, injected_files, [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] self._fetch_image_if_missing(context, vi) [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] image_cache(vi, tmp_image_ds_loc) [ 979.668576] 
env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] vm_util.copy_virtual_disk( [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] session._wait_for_task(vmdk_copy_task) [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] return self.wait_for_task(task_ref) [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] return evt.wait() [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] result = hub.switch() [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] return self.greenlet.switch() [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] self.f(*self.args, **self.kw) [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] raise exceptions.translate_fault(task_info.error) [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Faults: ['InvalidArgument'] [ 979.668576] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] [ 979.669400] env[62813]: INFO nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Terminating instance [ 979.670520] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquired 
lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.670730] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 979.670965] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-effdeb13-a1a8-4804-8f09-326fc8ec3370 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.673157] env[62813]: DEBUG nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 979.673354] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 979.674160] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ccb19d-d364-422a-8caf-9f431ee07a86 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.682044] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 979.682044] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d96eeccb-0c8c-4a68-a453-fe28f05d7197 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.684204] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 979.684446] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 979.685357] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06f49812-8e8a-4122-8133-a6abd1bce203 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.690528] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Waiting for the task: (returnval){ [ 979.690528] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52878fdf-1f16-b6bd-5994-8ac66d7d06fd" [ 979.690528] env[62813]: _type = "Task" [ 979.690528] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.698335] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52878fdf-1f16-b6bd-5994-8ac66d7d06fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.765290] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 979.765541] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 979.765809] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Deleting the datastore file [datastore2] 09aa702f-a28c-429b-83d9-378be8606a29 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 979.766139] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79dba842-da3f-4fde-9657-5615d5e2dfc7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.773071] env[62813]: DEBUG oslo_vmware.api [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Waiting for the task: (returnval){ [ 979.773071] env[62813]: value = "task-4267642" [ 979.773071] env[62813]: _type = "Task" [ 979.773071] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.782095] env[62813]: DEBUG oslo_vmware.api [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Task: {'id': task-4267642, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.202967] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 980.203265] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Creating directory with path [datastore2] vmware_temp/d0d344fa-a419-4970-b9c7-558e77e1f132/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 980.203512] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9264088-c860-4d50-aef2-3e6420d7cb7c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.216534] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Created directory with path [datastore2] vmware_temp/d0d344fa-a419-4970-b9c7-558e77e1f132/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 980.216741] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Fetch image to [datastore2] vmware_temp/d0d344fa-a419-4970-b9c7-558e77e1f132/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 980.216913] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/d0d344fa-a419-4970-b9c7-558e77e1f132/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 980.217747] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189808a7-cd50-4e63-856d-012313d5b8c1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.224902] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e1cc7f-5ce9-4966-8a6e-f30d5969bd5b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.235054] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd2753a-9c58-428a-9c43-cd2ac754b1b2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.264920] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea092ef1-b350-49ca-a24a-94e113c2e5b8 
{{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.271485] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c1fcc774-b56e-416e-93d5-6aefdd759e5f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.281330] env[62813]: DEBUG oslo_vmware.api [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Task: {'id': task-4267642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068227} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.281585] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 980.281771] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 980.281951] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 980.282147] env[62813]: INFO nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 980.284357] env[62813]: DEBUG nova.compute.claims [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 980.284552] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.284782] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.294718] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 980.354219] env[62813]: DEBUG oslo_vmware.rw_handles [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d0d344fa-a419-4970-b9c7-558e77e1f132/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 980.417575] env[62813]: DEBUG oslo_vmware.rw_handles [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 980.418057] env[62813]: DEBUG oslo_vmware.rw_handles [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d0d344fa-a419-4970-b9c7-558e77e1f132/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 980.712431] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d4a2d5-7db1-4d4d-86bb-dcbeb4a1efb1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.720776] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5cbfdd-b3e6-4682-a8c3-d7478ecca126 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.753476] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0446e17a-c541-4bff-ab5d-772f95c4b050 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.761520] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06c2ba1-5aa7-4370-aad0-b0055520344f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.775056] env[62813]: DEBUG nova.compute.provider_tree [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.784051] env[62813]: DEBUG nova.scheduler.client.report [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 980.798233] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.513s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.798963] env[62813]: ERROR nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 980.798963] env[62813]: Faults: ['InvalidArgument'] [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Traceback (most recent call last): [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] self.driver.spawn(context, instance, image_meta, [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] self._vmops.spawn(context, instance, image_meta, injected_files, [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] self._fetch_image_if_missing(context, vi) [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] image_cache(vi, tmp_image_ds_loc) [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] vm_util.copy_virtual_disk( [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] session._wait_for_task(vmdk_copy_task) [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] return self.wait_for_task(task_ref) [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] return evt.wait() [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] result = hub.switch() [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] return self.greenlet.switch() [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] self.f(*self.args, **self.kw) [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 
09aa702f-a28c-429b-83d9-378be8606a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] raise exceptions.translate_fault(task_info.error) [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Faults: ['InvalidArgument'] [ 980.798963] env[62813]: ERROR nova.compute.manager [instance: 09aa702f-a28c-429b-83d9-378be8606a29] [ 980.799921] env[62813]: DEBUG nova.compute.utils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 980.801472] env[62813]: DEBUG nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Build of instance 09aa702f-a28c-429b-83d9-378be8606a29 was re-scheduled: A specified parameter was not correct: fileType [ 980.801472] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 980.801851] env[62813]: DEBUG nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 980.802041] env[62813]: DEBUG nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 980.802219] env[62813]: DEBUG nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 980.802385] env[62813]: DEBUG nova.network.neutron [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 981.376634] env[62813]: DEBUG nova.network.neutron [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.389590] env[62813]: INFO nova.compute.manager [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Took 0.59 seconds to deallocate network for instance. [ 981.504715] env[62813]: INFO nova.scheduler.client.report [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Deleted allocations for instance 09aa702f-a28c-429b-83d9-378be8606a29 [ 981.526240] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0129a8b2-7f73-4ef3-b617-2d1d5467c895 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Lock "09aa702f-a28c-429b-83d9-378be8606a29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 383.317s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.527356] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2b17c98a-e168-49b0-beb7-b0c7162b47f4 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Lock "09aa702f-a28c-429b-83d9-378be8606a29" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 184.059s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.527729] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2b17c98a-e168-49b0-beb7-b0c7162b47f4 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Acquiring lock "09aa702f-a28c-429b-83d9-378be8606a29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.527915] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2b17c98a-e168-49b0-beb7-b0c7162b47f4 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Lock 
"09aa702f-a28c-429b-83d9-378be8606a29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.528362] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2b17c98a-e168-49b0-beb7-b0c7162b47f4 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Lock "09aa702f-a28c-429b-83d9-378be8606a29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.530419] env[62813]: INFO nova.compute.manager [None req-2b17c98a-e168-49b0-beb7-b0c7162b47f4 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Terminating instance [ 981.532704] env[62813]: DEBUG nova.compute.manager [None req-2b17c98a-e168-49b0-beb7-b0c7162b47f4 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 981.532781] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2b17c98a-e168-49b0-beb7-b0c7162b47f4 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 981.533259] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-65d8eff4-7876-427d-a9c5-a4b2853d1958 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.538797] env[62813]: DEBUG nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 981.545498] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db229d2d-5362-4aa4-ba63-4324cecd09bd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.577118] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-2b17c98a-e168-49b0-beb7-b0c7162b47f4 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 09aa702f-a28c-429b-83d9-378be8606a29 could not be found. 
[ 981.577362] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2b17c98a-e168-49b0-beb7-b0c7162b47f4 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 981.577538] env[62813]: INFO nova.compute.manager [None req-2b17c98a-e168-49b0-beb7-b0c7162b47f4 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Took 0.04 seconds to destroy the instance on the hypervisor. [ 981.577787] env[62813]: DEBUG oslo.service.loopingcall [None req-2b17c98a-e168-49b0-beb7-b0c7162b47f4 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 981.578789] env[62813]: DEBUG nova.compute.manager [-] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 981.578789] env[62813]: DEBUG nova.network.neutron [-] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 981.595721] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.595968] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.597572] env[62813]: INFO nova.compute.claims [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 981.611956] env[62813]: DEBUG nova.network.neutron [-] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.632291] env[62813]: INFO nova.compute.manager [-] [instance: 09aa702f-a28c-429b-83d9-378be8606a29] Took 0.05 seconds to deallocate network for instance. 
[ 981.748553] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2b17c98a-e168-49b0-beb7-b0c7162b47f4 tempest-AttachInterfacesUnderV243Test-321837802 tempest-AttachInterfacesUnderV243Test-321837802-project-member] Lock "09aa702f-a28c-429b-83d9-378be8606a29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.221s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.962482] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dfa314-2383-4066-86f3-aac69eaea4b7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.970814] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829ca8a0-0ba7-4fa7-badd-0e7559153682 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.000570] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aefd9f3-b771-439d-8b6a-c6a75b0736e1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.008582] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d971854-982a-4f1d-b47a-c62321105698 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.023051] env[62813]: DEBUG nova.compute.provider_tree [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.031859] env[62813]: DEBUG nova.scheduler.client.report [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 982.046540] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.450s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.047041] env[62813]: DEBUG nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Start building networks asynchronously for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 982.083207] env[62813]: DEBUG nova.compute.utils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 982.084646] env[62813]: DEBUG nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 982.084818] env[62813]: DEBUG nova.network.neutron [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 982.093905] env[62813]: DEBUG nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 982.143937] env[62813]: DEBUG nova.policy [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e99df797faa4a24807f9b364ee57cf7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e76396eac1da4fe1959e7f4286bf108b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 982.163046] env[62813]: DEBUG nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 982.190013] env[62813]: DEBUG nova.virt.hardware [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 982.190284] env[62813]: DEBUG nova.virt.hardware [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 982.190448] env[62813]: DEBUG nova.virt.hardware [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 982.190633] env[62813]: DEBUG nova.virt.hardware [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 982.190946] env[62813]: DEBUG nova.virt.hardware [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 982.191157] env[62813]: DEBUG nova.virt.hardware [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 982.191386] env[62813]: DEBUG nova.virt.hardware [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 982.191556] env[62813]: DEBUG nova.virt.hardware [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 982.191744] env[62813]: DEBUG nova.virt.hardware [None 
req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 982.191900] env[62813]: DEBUG nova.virt.hardware [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 982.192096] env[62813]: DEBUG nova.virt.hardware [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 982.192992] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7441b9-fc88-4fca-9bbd-9d2450e89885 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.202347] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d5d9dd-c4fb-41e5-b5a9-30ce67232c4e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.671826] env[62813]: DEBUG nova.network.neutron [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Successfully created port: 8d8aa618-2d23-4e48-9fe8-5577ded9e4ff {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 983.611682] env[62813]: DEBUG nova.network.neutron [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Successfully updated port: 8d8aa618-2d23-4e48-9fe8-5577ded9e4ff {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 983.625473] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Acquiring lock "refresh_cache-1d8d7576-935b-4f51-8475-fe09aad4ea7c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.625983] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Acquired lock "refresh_cache-1d8d7576-935b-4f51-8475-fe09aad4ea7c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.626547] env[62813]: DEBUG nova.network.neutron [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 983.680522] env[62813]: DEBUG nova.network.neutron [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 
tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 983.813220] env[62813]: DEBUG nova.compute.manager [req-be8222be-5554-40f7-801a-1bca3f0b7fb2 req-a1da6037-9853-43f9-ad77-d7cb90e796e2 service nova] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Received event network-vif-plugged-8d8aa618-2d23-4e48-9fe8-5577ded9e4ff {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 983.814433] env[62813]: DEBUG oslo_concurrency.lockutils [req-be8222be-5554-40f7-801a-1bca3f0b7fb2 req-a1da6037-9853-43f9-ad77-d7cb90e796e2 service nova] Acquiring lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.814730] env[62813]: DEBUG oslo_concurrency.lockutils [req-be8222be-5554-40f7-801a-1bca3f0b7fb2 req-a1da6037-9853-43f9-ad77-d7cb90e796e2 service nova] Lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.814923] env[62813]: DEBUG oslo_concurrency.lockutils [req-be8222be-5554-40f7-801a-1bca3f0b7fb2 req-a1da6037-9853-43f9-ad77-d7cb90e796e2 service nova] Lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.815253] env[62813]: DEBUG nova.compute.manager [req-be8222be-5554-40f7-801a-1bca3f0b7fb2 req-a1da6037-9853-43f9-ad77-d7cb90e796e2 service nova] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] No waiting events found dispatching network-vif-plugged-8d8aa618-2d23-4e48-9fe8-5577ded9e4ff {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 983.816252] env[62813]: WARNING nova.compute.manager [req-be8222be-5554-40f7-801a-1bca3f0b7fb2 req-a1da6037-9853-43f9-ad77-d7cb90e796e2 service nova] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Received unexpected event network-vif-plugged-8d8aa618-2d23-4e48-9fe8-5577ded9e4ff for instance with vm_state building and task_state spawning. 
[ 983.942164] env[62813]: DEBUG nova.network.neutron [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Updating instance_info_cache with network_info: [{"id": "8d8aa618-2d23-4e48-9fe8-5577ded9e4ff", "address": "fa:16:3e:ed:88:68", "network": {"id": "32bcb1ba-4c05-4d23-b76c-6e909cf4dae6", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1855899701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e76396eac1da4fe1959e7f4286bf108b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d8aa618-2d", "ovs_interfaceid": "8d8aa618-2d23-4e48-9fe8-5577ded9e4ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.957105] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Releasing lock "refresh_cache-1d8d7576-935b-4f51-8475-fe09aad4ea7c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.957420] env[62813]: DEBUG nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Instance network_info: |[{"id": "8d8aa618-2d23-4e48-9fe8-5577ded9e4ff", "address": "fa:16:3e:ed:88:68", "network": {"id": "32bcb1ba-4c05-4d23-b76c-6e909cf4dae6", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1855899701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e76396eac1da4fe1959e7f4286bf108b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d8aa618-2d", "ovs_interfaceid": "8d8aa618-2d23-4e48-9fe8-5577ded9e4ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 983.958131] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None 
req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:88:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e39ca24f-7890-4cdf-8dab-ecab218bb063', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d8aa618-2d23-4e48-9fe8-5577ded9e4ff', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 983.973845] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Creating folder: Project (e76396eac1da4fe1959e7f4286bf108b). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 983.974885] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8cf5801-4f90-492a-86f3-fea4f43d17a5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.988203] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Created folder: Project (e76396eac1da4fe1959e7f4286bf108b) in parent group-v840812. [ 983.988203] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Creating folder: Instances. Parent ref: group-v840866. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 983.988658] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dcfae59e-b1f8-474d-9c03-5b20281b9f9b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.998613] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Created folder: Instances in parent group-v840866. [ 983.998863] env[62813]: DEBUG oslo.service.loopingcall [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 983.999092] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 983.999436] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-767ddf77-d8c0-4ca0-bf26-86065d8e9a3f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.020725] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.020725] env[62813]: value = "task-4267645" [ 984.020725] env[62813]: _type = "Task" [ 984.020725] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.030249] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267645, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.541860] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267645, 'name': CreateVM_Task, 'duration_secs': 0.427086} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.542129] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 984.542908] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.543090] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.543593] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 984.544486] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7e570dd-5a28-4a39-ba83-d34283dca568 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.551042] env[62813]: DEBUG oslo_vmware.api [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Waiting for the task: (returnval){ [ 984.551042] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5297b39e-cd74-7b9b-6b0b-1328faf6ff0e" [ 984.551042] env[62813]: _type = "Task" [ 984.551042] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.560655] env[62813]: DEBUG oslo_vmware.api [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5297b39e-cd74-7b9b-6b0b-1328faf6ff0e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.063193] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.063193] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 985.063478] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.880256] env[62813]: DEBUG nova.compute.manager [req-393eab99-d972-47bc-a461-6a32660ea6ff req-37d7ff3f-1fa3-4bf1-8eac-045cc76f0136 service nova] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Received event network-changed-8d8aa618-2d23-4e48-9fe8-5577ded9e4ff {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 985.880256] env[62813]: DEBUG nova.compute.manager [req-393eab99-d972-47bc-a461-6a32660ea6ff req-37d7ff3f-1fa3-4bf1-8eac-045cc76f0136 service nova] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Refreshing instance network info cache due to event network-changed-8d8aa618-2d23-4e48-9fe8-5577ded9e4ff. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 985.880256] env[62813]: DEBUG oslo_concurrency.lockutils [req-393eab99-d972-47bc-a461-6a32660ea6ff req-37d7ff3f-1fa3-4bf1-8eac-045cc76f0136 service nova] Acquiring lock "refresh_cache-1d8d7576-935b-4f51-8475-fe09aad4ea7c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.880256] env[62813]: DEBUG oslo_concurrency.lockutils [req-393eab99-d972-47bc-a461-6a32660ea6ff req-37d7ff3f-1fa3-4bf1-8eac-045cc76f0136 service nova] Acquired lock "refresh_cache-1d8d7576-935b-4f51-8475-fe09aad4ea7c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.880256] env[62813]: DEBUG nova.network.neutron [req-393eab99-d972-47bc-a461-6a32660ea6ff req-37d7ff3f-1fa3-4bf1-8eac-045cc76f0136 service nova] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Refreshing network info cache for port 8d8aa618-2d23-4e48-9fe8-5577ded9e4ff {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 986.257876] env[62813]: DEBUG nova.network.neutron [req-393eab99-d972-47bc-a461-6a32660ea6ff req-37d7ff3f-1fa3-4bf1-8eac-045cc76f0136 service nova] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Updated VIF entry in instance network info cache for port 8d8aa618-2d23-4e48-9fe8-5577ded9e4ff. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 986.258266] env[62813]: DEBUG nova.network.neutron [req-393eab99-d972-47bc-a461-6a32660ea6ff req-37d7ff3f-1fa3-4bf1-8eac-045cc76f0136 service nova] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Updating instance_info_cache with network_info: [{"id": "8d8aa618-2d23-4e48-9fe8-5577ded9e4ff", "address": "fa:16:3e:ed:88:68", "network": {"id": "32bcb1ba-4c05-4d23-b76c-6e909cf4dae6", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1855899701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e76396eac1da4fe1959e7f4286bf108b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d8aa618-2d", "ovs_interfaceid": "8d8aa618-2d23-4e48-9fe8-5577ded9e4ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.269083] env[62813]: DEBUG oslo_concurrency.lockutils [req-393eab99-d972-47bc-a461-6a32660ea6ff req-37d7ff3f-1fa3-4bf1-8eac-045cc76f0136 service nova] Releasing lock "refresh_cache-1d8d7576-935b-4f51-8475-fe09aad4ea7c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.554495] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7571f0f2-80e9-4db8-8194-b08538f5af7a tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Acquiring lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.521813] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "e6442505-b5d0-4736-a24a-41fccda6da6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.522089] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "e6442505-b5d0-4736-a24a-41fccda6da6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.570317] env[62813]: DEBUG oslo_concurrency.lockutils [None req-013c758e-ef81-4bd1-b37a-8073f35f6878 tempest-ServerRescueNegativeTestJSON-351686158 
tempest-ServerRescueNegativeTestJSON-351686158-project-member] Acquiring lock "f0d3a02d-ddb9-4338-989e-e256fb50ede5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.570317] env[62813]: DEBUG oslo_concurrency.lockutils [None req-013c758e-ef81-4bd1-b37a-8073f35f6878 tempest-ServerRescueNegativeTestJSON-351686158 tempest-ServerRescueNegativeTestJSON-351686158-project-member] Lock "f0d3a02d-ddb9-4338-989e-e256fb50ede5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.620379] env[62813]: DEBUG oslo_concurrency.lockutils [None req-743bab17-50f4-4541-aa15-e4dfa42d6a92 tempest-ServerRescueNegativeTestJSON-351686158 tempest-ServerRescueNegativeTestJSON-351686158-project-member] Acquiring lock "e261660b-b9ee-487c-b044-ce1325c8e2ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.620723] env[62813]: DEBUG oslo_concurrency.lockutils [None req-743bab17-50f4-4541-aa15-e4dfa42d6a92 tempest-ServerRescueNegativeTestJSON-351686158 tempest-ServerRescueNegativeTestJSON-351686158-project-member] Lock "e261660b-b9ee-487c-b044-ce1325c8e2ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.279620] env[62813]: DEBUG oslo_concurrency.lockutils [None req-741b79bd-e8f1-4427-afb5-ff8252feca99 tempest-ServerTagsTestJSON-1563933505 tempest-ServerTagsTestJSON-1563933505-project-member] Acquiring lock "b042b18a-4efb-431a-afa6-f4dc8b4c1bd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.279888] env[62813]: DEBUG oslo_concurrency.lockutils [None req-741b79bd-e8f1-4427-afb5-ff8252feca99 tempest-ServerTagsTestJSON-1563933505 tempest-ServerTagsTestJSON-1563933505-project-member] Lock "b042b18a-4efb-431a-afa6-f4dc8b4c1bd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.933776] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1013.933776] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1013.935368] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1015.165791] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1015.166527] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1015.465266] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f2a95070-73a4-40ef-bae4-114289d18933 tempest-ServerAddressesTestJSON-559922155 tempest-ServerAddressesTestJSON-559922155-project-member] Acquiring lock "4de44f46-3872-46f4-afb4-308cc8b18c89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.465546] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f2a95070-73a4-40ef-bae4-114289d18933 tempest-ServerAddressesTestJSON-559922155 tempest-ServerAddressesTestJSON-559922155-project-member] Lock "4de44f46-3872-46f4-afb4-308cc8b18c89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.159207] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1016.188922] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.163848] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.164126] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.164376] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1017.164717] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1017.186777] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1017.186987] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1017.187690] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1017.187966] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1017.188128] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1017.188260] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1017.188395] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1017.188518] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1017.188640] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1017.188761] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1017.188880] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1018.164778] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.164129] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.176799] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.177133] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.177211] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.177369] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1019.178598] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771d206c-634f-4ad3-8a76-fcff884de686 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.193184] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8177db-7a6c-4cb6-af94-b19c6a2e36af {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.212708] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7858f4ce-5a3b-4d29-888f-cb7bec68c1ae {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.220402] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d910f0ea-4284-4776-828b-9a8fcfd9f480 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.252640] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180768MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1019.252814] env[62813]: DEBUG 
oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.252979] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.339270] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 9a448d2b-0dee-4a90-b131-e6ada542f342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1019.339462] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance aa76585b-55a8-437c-8dea-7731d85a3b82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1019.340478] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1019.340715] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 76b0e03d-9636-4328-bfd5-17c434cfae72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1019.340874] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 356088a2-b55e-4ff1-9422-a53ab6830fc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1019.341018] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 489b821e-f7d0-446f-8197-550c808e5a99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1019.341199] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b946bdda-a8a4-4a82-b2f7-99637fcae21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1019.341353] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1019.341491] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9b7bace-d76a-4dd8-8283-b56fd86a77a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1019.341625] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1019.361760] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 40655a50-5c68-4141-be93-f7a39aa5a168 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.376493] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.388804] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 6ed29c0d-710c-4f2f-b321-bbd8d253f918 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.401172] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 90f047bb-b344-445b-906e-ca8efedf6f60 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.412699] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 419ed7f3-e302-4b62-965c-d12dc88ff2c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.427842] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 69c52191-e779-4bb4-b3aa-f39c8a70450b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.442852] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f424eed0-7af3-45e7-b451-ddd6c23871f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.457330] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 989c0e58-b997-44c5-bc4f-759a30fbbfe3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.471113] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance ea2f91dc-1762-420a-90dc-c2e32811d911 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.482221] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a564338e-81c2-4b17-a507-081f6a57e190 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.493458] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance eec98a4d-34f4-4313-8f9c-2fe9f483959c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.505774] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e9e201ea-9561-483b-a39e-6180fc6f5a2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.516473] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 287a3859-9150-414f-a4ef-2ba3af8edc8b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.529043] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 081b2d72-3954-4ce8-b8dd-6b4d6c1eea9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.543205] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d79298e6-bb55-4ba6-9a68-e460c8953c7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.557706] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e6442505-b5d0-4736-a24a-41fccda6da6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.571548] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f0d3a02d-ddb9-4338-989e-e256fb50ede5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.583662] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e261660b-b9ee-487c-b044-ce1325c8e2ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.595618] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b042b18a-4efb-431a-afa6-f4dc8b4c1bd7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.606657] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4de44f46-3872-46f4-afb4-308cc8b18c89 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.606986] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1019.607081] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1020.058016] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c3318d-2c0b-4a55-8489-11cbe4b1e1b4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.066926] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d76c55-c7c6-497d-ad5d-4b3ab5febfd3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.100120] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9965a72c-8ece-41f8-b9e5-f014c388aaab {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.108822] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e896204b-096f-4bee-a5d1-6481baf4c0f9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.123108] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.132268] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1020.155402] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1020.155601] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.903s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.861322] env[62813]: DEBUG oslo_concurrency.lockutils [None req-10962ca0-40fc-4346-bad1-4825c0e1567a tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Acquiring lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.589403] env[62813]: WARNING oslo_vmware.rw_handles [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1025.589403] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1025.589403] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1025.589403] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1025.589403] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1025.589403] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1025.589403] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1025.589403] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1025.589403] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1025.589403] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1025.589403] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1025.589403] env[62813]: ERROR oslo_vmware.rw_handles [ 1025.589403] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/d0d344fa-a419-4970-b9c7-558e77e1f132/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1025.591827] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c 
tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1025.591827] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Copying Virtual Disk [datastore2] vmware_temp/d0d344fa-a419-4970-b9c7-558e77e1f132/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/d0d344fa-a419-4970-b9c7-558e77e1f132/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1025.591996] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c07c91b-fc97-461f-b832-8286dba9ee97 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.603695] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Waiting for the task: (returnval){ [ 1025.603695] env[62813]: value = "task-4267646" [ 1025.603695] env[62813]: _type = "Task" [ 1025.603695] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.616604] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Task: {'id': task-4267646, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.118324] env[62813]: DEBUG oslo_vmware.exceptions [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1026.119340] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.121550] env[62813]: ERROR nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1026.121550] env[62813]: Faults: ['InvalidArgument'] [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Traceback (most recent call last): [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] yield resources [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] self.driver.spawn(context, instance, image_meta, [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] self._fetch_image_if_missing(context, vi) [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] image_cache(vi, tmp_image_ds_loc) [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] vm_util.copy_virtual_disk( [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] session._wait_for_task(vmdk_copy_task) [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] return self.wait_for_task(task_ref) [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] return evt.wait() [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] result = hub.switch() [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] return self.greenlet.switch() [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] self.f(*self.args, **self.kw) [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] raise exceptions.translate_fault(task_info.error) [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Faults: ['InvalidArgument'] [ 1026.121550] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] [ 1026.125774] env[62813]: INFO nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Terminating instance [ 1026.125774] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.125774] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1026.126388] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 
tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1026.126597] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1026.126780] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31767aba-9d3b-4249-99ec-880d6b9b6e91 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.130139] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9730e112-a919-40dd-a15f-f22aad01d9e5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.137934] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1026.138220] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1148383-2595-4b98-9940-f5115c50f815 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.143016] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1026.143272] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1026.144622] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0111b86a-4315-4448-97f0-add66ee0306f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.150548] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Waiting for the task: (returnval){ [ 1026.150548] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52bbf3c4-6875-f70d-04de-952fa3bd34d0" [ 1026.150548] env[62813]: _type = "Task" [ 1026.150548] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.160997] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52bbf3c4-6875-f70d-04de-952fa3bd34d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.214145] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1026.214454] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1026.214649] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Deleting the datastore file [datastore2] 9a448d2b-0dee-4a90-b131-e6ada542f342 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.214926] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1dd2164-2bc1-45d8-82e4-0f1b4dfb0a53 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.222881] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Waiting for the task: (returnval){ [ 1026.222881] env[62813]: value = "task-4267648" [ 1026.222881] env[62813]: _type = "Task" [ 1026.222881] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.235148] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Task: {'id': task-4267648, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.661795] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1026.662075] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Creating directory with path [datastore2] vmware_temp/63074628-5988-49dd-a71d-15a4b30a8b86/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1026.664203] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9889931b-34ec-497b-bf17-471f451e2c10 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.676172] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Created directory with path [datastore2] vmware_temp/63074628-5988-49dd-a71d-15a4b30a8b86/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1026.676453] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Fetch image to [datastore2] vmware_temp/63074628-5988-49dd-a71d-15a4b30a8b86/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1026.676783] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/63074628-5988-49dd-a71d-15a4b30a8b86/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1026.677493] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31494d7f-d549-4282-b0e9-dbbf21f0332f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.685536] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1787a0ca-3671-47d8-8bf8-e97c4bef5107 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.696467] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9717b0-0a59-4d0c-b2d9-b66c9fecadd8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.734611] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f6cfa556-bc7f-4a65-9030-8afca19a7ce5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.746777] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c236d493-6ff7-41f5-8f93-3bf5ab494bd0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.747205] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Task: {'id': task-4267648, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085252} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.747409] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1026.747580] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1026.747818] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1026.747901] env[62813]: INFO nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Took 0.62 seconds to destroy the instance on the hypervisor. 
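Editor's note on the task-polling pattern above: the DeleteDatastoreFile_Task records follow the sequence seen throughout this log — the driver submits a vCenter task, logs "Waiting for the task ... to complete", repeatedly logs "progress is N%" from _poll_task, and finally logs "completed successfully" (or raises a translated fault, as in the CopyVirtualDisk_Task failure earlier). The sketch below is only a minimal, self-contained illustration of that poll-until-done loop under stated assumptions; it does not use the real oslo.vmware API, and the names FakeTask, TaskFault, and poll_task are hypothetical stand-ins invented for this example.

# Minimal sketch of a poll-until-complete loop in the style of the
# wait_for_task/_poll_task records above. FakeTask, TaskFault and
# poll_task are hypothetical illustrations, not the oslo.vmware API.
import time


class TaskFault(Exception):
    """Raised when the polled task reports an error state (cf. translate_fault)."""


class FakeTask:
    """Stand-in for a remote task handle: reports success after a few polls."""

    def __init__(self, polls_until_done=3):
        self._remaining = polls_until_done
        self.progress = 0

    def info(self):
        if self._remaining > 0:
            self._remaining -= 1
            self.progress = min(100, self.progress + 40)
            return {"state": "running", "progress": self.progress}
        return {"state": "success", "progress": 100}


def poll_task(task, interval=0.2, timeout=30.0):
    """Poll `task` until it reports success; raise TaskFault on error or timeout."""
    deadline = time.monotonic() + timeout
    while True:
        info = task.info()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise TaskFault(info.get("error", "task failed"))
        if time.monotonic() > deadline:
            raise TaskFault("timed out waiting for task")
        # Mirrors the repeated "progress is N%" DEBUG lines in the log.
        print(f"progress is {info['progress']}%")
        time.sleep(interval)


if __name__ == "__main__":
    print(poll_task(FakeTask()))

The point of the sketch is the control flow only: a bounded polling loop that surfaces the remote task's error as an exception, which is how the InvalidArgument fault from the earlier CopyVirtualDisk_Task ends up as a VimFaultException in the compute manager's traceback. The remainder of the log continues unmodified below.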
[ 1026.750090] env[62813]: DEBUG nova.compute.claims [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1026.750284] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.750502] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.770585] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1026.842197] env[62813]: DEBUG oslo_vmware.rw_handles [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/63074628-5988-49dd-a71d-15a4b30a8b86/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1026.911947] env[62813]: DEBUG oslo_vmware.rw_handles [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1026.912190] env[62813]: DEBUG oslo_vmware.rw_handles [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/63074628-5988-49dd-a71d-15a4b30a8b86/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1027.279082] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95873eb-1157-4fb7-9662-2d18b35a0210 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.287783] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d36e08-728c-4665-acdb-11dc387fc21a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.319025] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e734721c-e24b-4268-a56a-3e615d6b1026 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.327941] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e282f4bb-b4e6-4e2f-bf01-bb883b255e80 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.342466] env[62813]: DEBUG nova.compute.provider_tree [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.353045] env[62813]: DEBUG nova.scheduler.client.report [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1027.374885] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.624s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.375429] env[62813]: ERROR nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1027.375429] env[62813]: Faults: ['InvalidArgument'] [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Traceback (most recent call last): [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1027.375429] env[62813]: 
ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] self.driver.spawn(context, instance, image_meta, [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] self._fetch_image_if_missing(context, vi) [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] image_cache(vi, tmp_image_ds_loc) [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] vm_util.copy_virtual_disk( [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] session._wait_for_task(vmdk_copy_task) [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] return self.wait_for_task(task_ref) [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] return evt.wait() [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] result = hub.switch() [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] return self.greenlet.switch() [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] self.f(*self.args, **self.kw) [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] raise exceptions.translate_fault(task_info.error) [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Faults: ['InvalidArgument'] [ 1027.375429] env[62813]: ERROR nova.compute.manager [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] [ 1027.376547] env[62813]: DEBUG nova.compute.utils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1027.378034] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Build of instance 9a448d2b-0dee-4a90-b131-e6ada542f342 was re-scheduled: A specified parameter was not correct: fileType [ 1027.378034] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1027.378473] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1027.378657] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1027.378831] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1027.379038] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1027.837533] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.857333] env[62813]: INFO nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Took 0.48 seconds to deallocate network for instance. [ 1027.973977] env[62813]: INFO nova.scheduler.client.report [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Deleted allocations for instance 9a448d2b-0dee-4a90-b131-e6ada542f342 [ 1028.001491] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "9a448d2b-0dee-4a90-b131-e6ada542f342" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 427.027s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.003162] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c8236a88-ea4b-4142-be9d-ddb4eb1f9fbc tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "9a448d2b-0dee-4a90-b131-e6ada542f342" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 224.955s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.004122] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c8236a88-ea4b-4142-be9d-ddb4eb1f9fbc tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "9a448d2b-0dee-4a90-b131-e6ada542f342-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.004122] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c8236a88-ea4b-4142-be9d-ddb4eb1f9fbc tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "9a448d2b-0dee-4a90-b131-e6ada542f342-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.004122] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c8236a88-ea4b-4142-be9d-ddb4eb1f9fbc tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "9a448d2b-0dee-4a90-b131-e6ada542f342-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.006813] env[62813]: INFO nova.compute.manager [None req-c8236a88-ea4b-4142-be9d-ddb4eb1f9fbc tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Terminating instance [ 1028.008994] env[62813]: DEBUG nova.compute.manager [None req-c8236a88-ea4b-4142-be9d-ddb4eb1f9fbc tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1028.009255] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c8236a88-ea4b-4142-be9d-ddb4eb1f9fbc tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1028.009583] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f70a4d44-5957-45c7-b7b6-b6387813bb36 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.020177] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807e2c2b-ece5-413f-ada3-3988f613b2a6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.032606] env[62813]: DEBUG nova.compute.manager [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1028.057498] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-c8236a88-ea4b-4142-be9d-ddb4eb1f9fbc tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9a448d2b-0dee-4a90-b131-e6ada542f342 could not be found. 
[ 1028.057775] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c8236a88-ea4b-4142-be9d-ddb4eb1f9fbc tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1028.057985] env[62813]: INFO nova.compute.manager [None req-c8236a88-ea4b-4142-be9d-ddb4eb1f9fbc tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1028.058372] env[62813]: DEBUG oslo.service.loopingcall [None req-c8236a88-ea4b-4142-be9d-ddb4eb1f9fbc tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.058639] env[62813]: DEBUG nova.compute.manager [-] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1028.058853] env[62813]: DEBUG nova.network.neutron [-] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1028.093554] env[62813]: DEBUG nova.network.neutron [-] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.099372] env[62813]: DEBUG oslo_concurrency.lockutils [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.099664] env[62813]: DEBUG oslo_concurrency.lockutils [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.101336] env[62813]: INFO nova.compute.claims [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1028.104411] env[62813]: INFO nova.compute.manager [-] [instance: 9a448d2b-0dee-4a90-b131-e6ada542f342] Took 0.05 seconds to deallocate network for instance. 
[ 1028.224252] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c8236a88-ea4b-4142-be9d-ddb4eb1f9fbc tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "9a448d2b-0dee-4a90-b131-e6ada542f342" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.221s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.266688] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Acquiring lock "40655a50-5c68-4141-be93-f7a39aa5a168" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.537908] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f890b8f-d06e-44e3-9bfc-1a63c6191ec9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.545955] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68138778-3b59-43e9-8ec3-42a86117df3f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.580302] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff00ae34-d6d3-40d0-b70f-dc12bbd7e5fd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.589116] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0ea850-aaba-451e-abe5-165ea1315d37 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.602939] env[62813]: DEBUG nova.compute.provider_tree [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.611816] env[62813]: DEBUG nova.scheduler.client.report [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1028.629054] env[62813]: DEBUG oslo_concurrency.lockutils [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.529s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.629556] env[62813]: DEBUG nova.compute.manager [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1028.664755] env[62813]: DEBUG nova.compute.claims [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1028.664982] env[62813]: DEBUG oslo_concurrency.lockutils [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.665264] env[62813]: DEBUG oslo_concurrency.lockutils [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.074245] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac13056c-5c32-4a2d-8cd8-358934382d7c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.082250] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13788e21-0acd-406a-b60c-4bce76e62d40 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.113087] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5214bb4-d110-46ea-8972-2669803c05e7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.121144] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b994a1a8-b602-4710-a9db-55495ee83704 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.134956] env[62813]: DEBUG nova.compute.provider_tree [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.144869] env[62813]: DEBUG nova.scheduler.client.report [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 
tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1029.161914] env[62813]: DEBUG oslo_concurrency.lockutils [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.497s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.162697] env[62813]: DEBUG nova.compute.utils [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Conflict updating instance 40655a50-5c68-4141-be93-f7a39aa5a168. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1029.164222] env[62813]: DEBUG nova.compute.manager [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Instance disappeared during build. 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2505}} [ 1029.164394] env[62813]: DEBUG nova.compute.manager [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1029.164613] env[62813]: DEBUG oslo_concurrency.lockutils [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Acquiring lock "refresh_cache-40655a50-5c68-4141-be93-f7a39aa5a168" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.164761] env[62813]: DEBUG oslo_concurrency.lockutils [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Acquired lock "refresh_cache-40655a50-5c68-4141-be93-f7a39aa5a168" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.164923] env[62813]: DEBUG nova.network.neutron [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1029.209193] env[62813]: DEBUG nova.network.neutron [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1029.419522] env[62813]: DEBUG nova.network.neutron [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.428855] env[62813]: DEBUG oslo_concurrency.lockutils [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Releasing lock "refresh_cache-40655a50-5c68-4141-be93-f7a39aa5a168" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.429106] env[62813]: DEBUG nova.compute.manager [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1029.429294] env[62813]: DEBUG nova.compute.manager [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1029.429468] env[62813]: DEBUG nova.network.neutron [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1029.447495] env[62813]: DEBUG nova.network.neutron [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1029.455051] env[62813]: DEBUG nova.network.neutron [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.463445] env[62813]: INFO nova.compute.manager [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Took 0.03 seconds to deallocate network for instance. 
[ 1029.546994] env[62813]: INFO nova.scheduler.client.report [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Deleted allocations for instance 40655a50-5c68-4141-be93-f7a39aa5a168 [ 1029.547313] env[62813]: DEBUG oslo_concurrency.lockutils [None req-35199916-3cd9-44e9-8495-18abc5cb87be tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Lock "40655a50-5c68-4141-be93-f7a39aa5a168" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.344s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.548885] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Lock "40655a50-5c68-4141-be93-f7a39aa5a168" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.282s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.549123] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Acquiring lock "40655a50-5c68-4141-be93-f7a39aa5a168-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.549333] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Lock "40655a50-5c68-4141-be93-f7a39aa5a168-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.549505] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Lock "40655a50-5c68-4141-be93-f7a39aa5a168-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.551379] env[62813]: INFO nova.compute.manager [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Terminating instance [ 1029.553021] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Acquiring lock "refresh_cache-40655a50-5c68-4141-be93-f7a39aa5a168" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.553184] 
env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Acquired lock "refresh_cache-40655a50-5c68-4141-be93-f7a39aa5a168" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.553352] env[62813]: DEBUG nova.network.neutron [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1029.560983] env[62813]: DEBUG nova.compute.manager [None req-6425a6a1-1253-4318-adce-52a7ba85b69a tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 7f7b3544-6e9f-493a-b190-537d6c3b7979] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1029.585116] env[62813]: DEBUG nova.compute.manager [None req-6425a6a1-1253-4318-adce-52a7ba85b69a tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 7f7b3544-6e9f-493a-b190-537d6c3b7979] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1029.591928] env[62813]: DEBUG nova.network.neutron [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1029.608537] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6425a6a1-1253-4318-adce-52a7ba85b69a tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "7f7b3544-6e9f-493a-b190-537d6c3b7979" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 195.412s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.620941] env[62813]: DEBUG nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1029.692914] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.693338] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.695826] env[62813]: INFO nova.compute.claims [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1029.739975] env[62813]: DEBUG nova.network.neutron [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.752173] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Releasing lock "refresh_cache-40655a50-5c68-4141-be93-f7a39aa5a168" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.752591] env[62813]: DEBUG nova.compute.manager [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Start destroying the instance on the hypervisor. 
{{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1029.752862] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1029.753469] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3907bc16-2537-46a5-b1dc-4856c42c4407 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.769247] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dacee9b6-885a-4c95-b4e7-2d19a03ca515 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.803712] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 40655a50-5c68-4141-be93-f7a39aa5a168 could not be found. [ 1029.803931] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1029.804205] env[62813]: INFO nova.compute.manager [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1029.804376] env[62813]: DEBUG oslo.service.loopingcall [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1029.807194] env[62813]: DEBUG nova.compute.manager [-] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1029.807306] env[62813]: DEBUG nova.network.neutron [-] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1029.828026] env[62813]: DEBUG nova.network.neutron [-] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1029.837781] env[62813]: DEBUG nova.network.neutron [-] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.847865] env[62813]: INFO nova.compute.manager [-] [instance: 40655a50-5c68-4141-be93-f7a39aa5a168] Took 0.04 seconds to deallocate network for instance. [ 1029.960965] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d2d8cdc-2294-4b3f-8869-feaccb8564d4 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488 tempest-FloatingIPsAssociationNegativeTestJSON-1880432488-project-member] Lock "40655a50-5c68-4141-be93-f7a39aa5a168" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.412s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.191727] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5302003e-8852-4f51-ac47-26530ab163f8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.200141] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8cc617-c139-47e8-86ea-153e488c58ce {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.231153] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b4bbee-6b6a-4a8e-991d-5329bb6932a4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.239215] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df6f5e4-d01e-4163-879d-67225cd3747a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.253324] env[62813]: DEBUG nova.compute.provider_tree [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.263191] env[62813]: DEBUG nova.scheduler.client.report [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1030.282719] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.589s 
{{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.283363] env[62813]: DEBUG nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1030.324089] env[62813]: DEBUG nova.compute.utils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1030.325549] env[62813]: DEBUG nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1030.325874] env[62813]: DEBUG nova.network.neutron [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1030.336814] env[62813]: DEBUG nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1030.420721] env[62813]: DEBUG nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1030.424683] env[62813]: DEBUG nova.policy [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4bd8b31bd823423a9f5bd8da790efa8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68387d255abd49a8b92a50de113cdf66', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1030.448095] env[62813]: DEBUG nova.virt.hardware [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1030.448417] env[62813]: DEBUG nova.virt.hardware [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1030.448557] env[62813]: DEBUG nova.virt.hardware [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1030.448745] env[62813]: DEBUG nova.virt.hardware [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1030.448895] env[62813]: DEBUG nova.virt.hardware [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1030.449069] env[62813]: DEBUG nova.virt.hardware [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1030.449290] env[62813]: DEBUG 
nova.virt.hardware [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1030.449596] env[62813]: DEBUG nova.virt.hardware [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1030.449640] env[62813]: DEBUG nova.virt.hardware [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1030.449770] env[62813]: DEBUG nova.virt.hardware [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1030.449944] env[62813]: DEBUG nova.virt.hardware [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1030.451112] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b903e60d-0181-4a6a-9c05-bb296b828fde {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.461031] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25cae10-888e-4b2a-a4e0-ee145c800fc0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.791055] env[62813]: DEBUG nova.network.neutron [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Successfully created port: 83333a27-b647-4ebb-9ee3-dd9b63102205 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1031.977958] env[62813]: DEBUG nova.compute.manager [req-503f864d-dbff-46c5-8298-ea0e938a4c34 req-cb9258c5-b6f7-41c3-a2a0-7736e521ebf2 service nova] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Received event network-vif-plugged-83333a27-b647-4ebb-9ee3-dd9b63102205 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1031.979316] env[62813]: DEBUG oslo_concurrency.lockutils [req-503f864d-dbff-46c5-8298-ea0e938a4c34 req-cb9258c5-b6f7-41c3-a2a0-7736e521ebf2 service nova] Acquiring lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.979316] env[62813]: DEBUG oslo_concurrency.lockutils [req-503f864d-dbff-46c5-8298-ea0e938a4c34 
req-cb9258c5-b6f7-41c3-a2a0-7736e521ebf2 service nova] Lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.979316] env[62813]: DEBUG oslo_concurrency.lockutils [req-503f864d-dbff-46c5-8298-ea0e938a4c34 req-cb9258c5-b6f7-41c3-a2a0-7736e521ebf2 service nova] Lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.979316] env[62813]: DEBUG nova.compute.manager [req-503f864d-dbff-46c5-8298-ea0e938a4c34 req-cb9258c5-b6f7-41c3-a2a0-7736e521ebf2 service nova] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] No waiting events found dispatching network-vif-plugged-83333a27-b647-4ebb-9ee3-dd9b63102205 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1031.979316] env[62813]: WARNING nova.compute.manager [req-503f864d-dbff-46c5-8298-ea0e938a4c34 req-cb9258c5-b6f7-41c3-a2a0-7736e521ebf2 service nova] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Received unexpected event network-vif-plugged-83333a27-b647-4ebb-9ee3-dd9b63102205 for instance with vm_state building and task_state spawning. [ 1032.059584] env[62813]: DEBUG oslo_concurrency.lockutils [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquiring lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.108415] env[62813]: DEBUG nova.network.neutron [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Successfully updated port: 83333a27-b647-4ebb-9ee3-dd9b63102205 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1032.120273] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquiring lock "refresh_cache-a3cd73ab-b0e7-43f8-bba7-8539f89a1787" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.120469] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquired lock "refresh_cache-a3cd73ab-b0e7-43f8-bba7-8539f89a1787" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.120629] env[62813]: DEBUG nova.network.neutron [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1032.222444] env[62813]: DEBUG nova.network.neutron [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 
tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1032.618686] env[62813]: DEBUG nova.network.neutron [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Updating instance_info_cache with network_info: [{"id": "83333a27-b647-4ebb-9ee3-dd9b63102205", "address": "fa:16:3e:a8:0e:90", "network": {"id": "dc5840ff-d1b4-4671-9964-3525a1328a5d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1250215899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68387d255abd49a8b92a50de113cdf66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4b43a78-f49b-4132-ab2e-6e28769a9498", "external-id": "nsx-vlan-transportzone-737", "segmentation_id": 737, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83333a27-b6", "ovs_interfaceid": "83333a27-b647-4ebb-9ee3-dd9b63102205", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.638034] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Releasing lock "refresh_cache-a3cd73ab-b0e7-43f8-bba7-8539f89a1787" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.638139] env[62813]: DEBUG nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Instance network_info: |[{"id": "83333a27-b647-4ebb-9ee3-dd9b63102205", "address": "fa:16:3e:a8:0e:90", "network": {"id": "dc5840ff-d1b4-4671-9964-3525a1328a5d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1250215899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68387d255abd49a8b92a50de113cdf66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4b43a78-f49b-4132-ab2e-6e28769a9498", "external-id": "nsx-vlan-transportzone-737", "segmentation_id": 737, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83333a27-b6", "ovs_interfaceid": "83333a27-b647-4ebb-9ee3-dd9b63102205", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1032.638704] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:0e:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4b43a78-f49b-4132-ab2e-6e28769a9498', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83333a27-b647-4ebb-9ee3-dd9b63102205', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.646568] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Creating folder: Project (68387d255abd49a8b92a50de113cdf66). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1032.647207] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be807365-226c-41ce-b4f7-5016b5bdc926 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.659677] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Created folder: Project (68387d255abd49a8b92a50de113cdf66) in parent group-v840812. [ 1032.659882] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Creating folder: Instances. Parent ref: group-v840872. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1032.660162] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a636070a-601e-42d7-b58a-7422e0cfee1a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.670742] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Created folder: Instances in parent group-v840872. [ 1032.671075] env[62813]: DEBUG oslo.service.loopingcall [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1032.671234] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1032.671766] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-166e4db1-ed1a-4fd9-bff9-aebabdf381e0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.693334] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.693334] env[62813]: value = "task-4267656" [ 1032.693334] env[62813]: _type = "Task" [ 1032.693334] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.702556] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267656, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.205706] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267656, 'name': CreateVM_Task, 'duration_secs': 0.327176} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.206201] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1033.206965] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.207862] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.207862] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1033.207992] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53f92f61-1ef9-4154-bb79-92012b9aeac6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.213720] env[62813]: DEBUG oslo_vmware.api [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Waiting for the task: (returnval){ [ 1033.213720] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]526380b2-40db-17f7-743f-5f279fe4bdf1" [ 1033.213720] env[62813]: _type = "Task" [ 1033.213720] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.222527] env[62813]: DEBUG oslo_vmware.api [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]526380b2-40db-17f7-743f-5f279fe4bdf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.725795] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.725795] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1033.726026] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.218772] env[62813]: DEBUG nova.compute.manager [req-d4361820-3b19-4245-adb8-53723058fd5e req-5d636654-beb2-4a41-a60d-0f7430c6e372 service nova] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Received event network-changed-83333a27-b647-4ebb-9ee3-dd9b63102205 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1034.219052] env[62813]: DEBUG nova.compute.manager [req-d4361820-3b19-4245-adb8-53723058fd5e req-5d636654-beb2-4a41-a60d-0f7430c6e372 service nova] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Refreshing instance network info cache due to event network-changed-83333a27-b647-4ebb-9ee3-dd9b63102205. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1034.219209] env[62813]: DEBUG oslo_concurrency.lockutils [req-d4361820-3b19-4245-adb8-53723058fd5e req-5d636654-beb2-4a41-a60d-0f7430c6e372 service nova] Acquiring lock "refresh_cache-a3cd73ab-b0e7-43f8-bba7-8539f89a1787" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.219356] env[62813]: DEBUG oslo_concurrency.lockutils [req-d4361820-3b19-4245-adb8-53723058fd5e req-5d636654-beb2-4a41-a60d-0f7430c6e372 service nova] Acquired lock "refresh_cache-a3cd73ab-b0e7-43f8-bba7-8539f89a1787" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.219518] env[62813]: DEBUG nova.network.neutron [req-d4361820-3b19-4245-adb8-53723058fd5e req-5d636654-beb2-4a41-a60d-0f7430c6e372 service nova] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Refreshing network info cache for port 83333a27-b647-4ebb-9ee3-dd9b63102205 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1034.676894] env[62813]: DEBUG nova.network.neutron [req-d4361820-3b19-4245-adb8-53723058fd5e req-5d636654-beb2-4a41-a60d-0f7430c6e372 service nova] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Updated VIF entry in instance network info cache for port 83333a27-b647-4ebb-9ee3-dd9b63102205. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1034.677382] env[62813]: DEBUG nova.network.neutron [req-d4361820-3b19-4245-adb8-53723058fd5e req-5d636654-beb2-4a41-a60d-0f7430c6e372 service nova] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Updating instance_info_cache with network_info: [{"id": "83333a27-b647-4ebb-9ee3-dd9b63102205", "address": "fa:16:3e:a8:0e:90", "network": {"id": "dc5840ff-d1b4-4671-9964-3525a1328a5d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1250215899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68387d255abd49a8b92a50de113cdf66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4b43a78-f49b-4132-ab2e-6e28769a9498", "external-id": "nsx-vlan-transportzone-737", "segmentation_id": 737, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83333a27-b6", "ovs_interfaceid": "83333a27-b647-4ebb-9ee3-dd9b63102205", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.692718] env[62813]: DEBUG oslo_concurrency.lockutils [req-d4361820-3b19-4245-adb8-53723058fd5e req-5d636654-beb2-4a41-a60d-0f7430c6e372 service nova] Releasing lock "refresh_cache-a3cd73ab-b0e7-43f8-bba7-8539f89a1787" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.292447] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock 
"5655255a-1d03-4854-b8ad-d77643f9b9c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.292764] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "5655255a-1d03-4854-b8ad-d77643f9b9c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.903062] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d567675f-9f2e-4f11-9227-89e31afb0468 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "a074bbe0-a497-4aab-93f2-9a9aa6140290" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.903327] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d567675f-9f2e-4f11-9227-89e31afb0468 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "a074bbe0-a497-4aab-93f2-9a9aa6140290" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.996243] env[62813]: DEBUG oslo_concurrency.lockutils [None req-54050b8f-279d-4d1a-b80d-85a1bdc9029b tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "4e00cefc-ffe9-41e4-9520-281d937e32ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.003019] env[62813]: DEBUG oslo_concurrency.lockutils [None req-54050b8f-279d-4d1a-b80d-85a1bdc9029b tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "4e00cefc-ffe9-41e4-9520-281d937e32ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.091280] env[62813]: DEBUG oslo_concurrency.lockutils [None req-11116210-719c-4d94-ab15-7d7ebd63737c tempest-ServerActionsTestOtherB-1764703617 tempest-ServerActionsTestOtherB-1764703617-project-member] Acquiring lock "ab1749b4-a87b-4bd2-bdad-b2db446f44d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.092056] env[62813]: DEBUG oslo_concurrency.lockutils [None req-11116210-719c-4d94-ab15-7d7ebd63737c tempest-ServerActionsTestOtherB-1764703617 tempest-ServerActionsTestOtherB-1764703617-project-member] Lock "ab1749b4-a87b-4bd2-bdad-b2db446f44d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1044.970251] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d63da806-40b6-4bea-8de4-0f31634227fa tempest-FloatingIPsAssociationTestJSON-1590727866 tempest-FloatingIPsAssociationTestJSON-1590727866-project-member] Acquiring lock "81ac869e-c8ab-4f75-bfb7-bbb3296c24dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.970590] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d63da806-40b6-4bea-8de4-0f31634227fa tempest-FloatingIPsAssociationTestJSON-1590727866 tempest-FloatingIPsAssociationTestJSON-1590727866-project-member] Lock "81ac869e-c8ab-4f75-bfb7-bbb3296c24dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.505228] env[62813]: DEBUG oslo_concurrency.lockutils [None req-192f62c9-4564-48b3-b7cf-ba1a8d9ba39b tempest-ServersTestBootFromVolume-127445550 tempest-ServersTestBootFromVolume-127445550-project-member] Acquiring lock "6943412a-83f2-437b-80af-4a2de7ed5029" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.505541] env[62813]: DEBUG oslo_concurrency.lockutils [None req-192f62c9-4564-48b3-b7cf-ba1a8d9ba39b tempest-ServersTestBootFromVolume-127445550 tempest-ServersTestBootFromVolume-127445550-project-member] Lock "6943412a-83f2-437b-80af-4a2de7ed5029" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.532793] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bc3b853c-5d2d-4a0b-88d4-62fd016a765d tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] Acquiring lock "3a85472c-25b9-4fb7-a438-84fa699d7f0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.532793] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bc3b853c-5d2d-4a0b-88d4-62fd016a765d tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] Lock "3a85472c-25b9-4fb7-a438-84fa699d7f0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.084419] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57e07797-9a6b-4bdf-923c-cd3c06b3edf6 tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Acquiring lock "bae5a146-4946-4e03-a6f5-062e79a61def" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.084796] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57e07797-9a6b-4bdf-923c-cd3c06b3edf6 tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Lock 
"bae5a146-4946-4e03-a6f5-062e79a61def" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.156346] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1074.164019] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1074.164195] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1075.605810] env[62813]: WARNING oslo_vmware.rw_handles [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1075.605810] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1075.605810] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1075.605810] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1075.605810] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1075.605810] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1075.605810] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1075.605810] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1075.605810] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1075.605810] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1075.605810] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1075.605810] env[62813]: ERROR oslo_vmware.rw_handles [ 1075.606426] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/63074628-5988-49dd-a71d-15a4b30a8b86/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1075.608285] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1075.608459] env[62813]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Copying Virtual Disk [datastore2] vmware_temp/63074628-5988-49dd-a71d-15a4b30a8b86/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/63074628-5988-49dd-a71d-15a4b30a8b86/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1075.608754] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-139dfbe9-5467-45c0-b167-89a04a056ccf {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.617170] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Waiting for the task: (returnval){ [ 1075.617170] env[62813]: value = "task-4267662" [ 1075.617170] env[62813]: _type = "Task" [ 1075.617170] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.625929] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Task: {'id': task-4267662, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.127908] env[62813]: DEBUG oslo_vmware.exceptions [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1076.128224] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1076.128782] env[62813]: ERROR nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1076.128782] env[62813]: Faults: ['InvalidArgument'] [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Traceback (most recent call last): [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] yield resources [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] self.driver.spawn(context, instance, image_meta, [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] self._fetch_image_if_missing(context, vi) [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] image_cache(vi, tmp_image_ds_loc) [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] vm_util.copy_virtual_disk( [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] session._wait_for_task(vmdk_copy_task) [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] return self.wait_for_task(task_ref) [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] return evt.wait() [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] result = hub.switch() [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] return self.greenlet.switch() [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] self.f(*self.args, **self.kw) [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] raise exceptions.translate_fault(task_info.error) [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Faults: ['InvalidArgument'] [ 1076.128782] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] [ 1076.129737] env[62813]: INFO nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Terminating instance [ 1076.130814] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.131099] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1076.131365] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-1720139c-6857-4b87-855d-0350dbd2749f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.134623] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1076.134817] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1076.135578] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d92cf38-5ba6-4801-8fd9-c87bdad95165 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.144057] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1076.144281] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b8c943d-3b1c-4a94-b1e0-90223c73d2ef {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.146637] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1076.146813] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1076.147791] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15646e6a-f5cd-422c-833c-2858bf038a08 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.153058] env[62813]: DEBUG oslo_vmware.api [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Waiting for the task: (returnval){ [ 1076.153058] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52beb16e-d11f-033e-7b19-f321923378cc" [ 1076.153058] env[62813]: _type = "Task" [ 1076.153058] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.160933] env[62813]: DEBUG oslo_vmware.api [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52beb16e-d11f-033e-7b19-f321923378cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.163416] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.224827] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1076.225052] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1076.225173] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Deleting the datastore file [datastore2] aa76585b-55a8-437c-8dea-7731d85a3b82 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1076.225458] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-86842f5e-e97b-41cf-a16a-c1b50dc90008 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.232267] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Waiting for the task: (returnval){ [ 1076.232267] env[62813]: value = "task-4267664" [ 1076.232267] env[62813]: _type = "Task" [ 1076.232267] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.240812] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Task: {'id': task-4267664, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.664279] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1076.664663] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Creating directory with path [datastore2] vmware_temp/e6c22f25-5655-45eb-a337-e1251f930df0/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1076.664808] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe7967ff-4eb0-4597-8eeb-9e3b2e040973 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.677354] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Created directory with path [datastore2] vmware_temp/e6c22f25-5655-45eb-a337-e1251f930df0/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1076.677572] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Fetch image to [datastore2] vmware_temp/e6c22f25-5655-45eb-a337-e1251f930df0/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1076.677747] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/e6c22f25-5655-45eb-a337-e1251f930df0/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1076.678604] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2477feec-85a6-4aa6-bcf3-d8199f5a7400 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.687111] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3482cf9a-e211-4503-9975-6e5c2c660ca1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.697220] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623cef22-0e33-4ef0-a0f8-9ee0ee816061 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.731300] env[62813]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1413180-63a5-4442-9f5a-ae34068b94b5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.744456] env[62813]: DEBUG oslo_vmware.api [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Task: {'id': task-4267664, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065585} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.744693] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-55f0f25a-efcf-41d6-a69c-77e9d9b5fbd1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.746714] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1076.746918] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1076.747185] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1076.747328] env[62813]: INFO nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Took 0.61 seconds to destroy the instance on the hypervisor. 
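The entries above repeatedly show the oslo.vmware task pattern: a vCenter task such as CreateVM_Task, CopyVirtualDisk_Task or DeleteDatastoreFile_Task is invoked, then polled ("Waiting for the task ... to complete", "progress is 0%") until it completes or the fault is translated and raised. Below is a minimal, self-contained sketch of that polling loop; it is an illustration only, not the oslo_vmware.api code, and TaskInfo / fetch_task_info are hypothetical stand-ins for the real vCenter task-info call.

    # Simplified sketch of the poll loop reflected in the "progress is N%"
    # entries above. Not the oslo.vmware implementation; TaskInfo and
    # fetch_task_info are hypothetical stand-ins.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str              # 'running', 'success' or 'error'
        progress: int           # percent complete
        error: str | None = None  # fault message when state == 'error'

    def wait_for_task(fetch_task_info, poll_interval=0.5):
        """Poll a task until it finishes, raising on a task-level fault."""
        while True:
            info = fetch_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                # The CopyVirtualDisk_Task failure recorded in this log
                # ("A specified parameter was not correct: fileType")
                # surfaces through a branch like this one.
                raise RuntimeError(info.error)
            print(f"progress is {info.progress}%")
            time.sleep(poll_interval)

    # Example: a task that reports success on the second poll.
    states = iter([TaskInfo("running", 0), TaskInfo("success", 100)])
    wait_for_task(lambda: next(states), poll_interval=0)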
[ 1076.749509] env[62813]: DEBUG nova.compute.claims [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1076.749694] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.749908] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.777272] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1076.835378] env[62813]: DEBUG oslo_vmware.rw_handles [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6c22f25-5655-45eb-a337-e1251f930df0/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1076.895216] env[62813]: DEBUG oslo_vmware.rw_handles [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1076.895469] env[62813]: DEBUG oslo_vmware.rw_handles [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6c22f25-5655-45eb-a337-e1251f930df0/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1077.163900] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.207562] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b7e8f9-b2f0-4c3c-a9e3-1bee991cdcc8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.216955] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867f8aef-366a-4ed4-87ca-07efc2b13e3f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.246861] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377688aa-fc2f-4697-b6fa-7683523cd746 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.255018] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed1ff7d-257b-45ce-ba31-32a410f860c4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.268672] env[62813]: DEBUG nova.compute.provider_tree [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1077.278969] env[62813]: DEBUG nova.scheduler.client.report [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1077.295867] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.546s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.296491] env[62813]: ERROR nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1077.296491] env[62813]: Faults: ['InvalidArgument'] [ 1077.296491] env[62813]: ERROR nova.compute.manager 
[instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Traceback (most recent call last): [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] self.driver.spawn(context, instance, image_meta, [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] self._fetch_image_if_missing(context, vi) [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] image_cache(vi, tmp_image_ds_loc) [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] vm_util.copy_virtual_disk( [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] session._wait_for_task(vmdk_copy_task) [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] return self.wait_for_task(task_ref) [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] return evt.wait() [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] result = hub.switch() [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] return self.greenlet.switch() [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] self.f(*self.args, **self.kw) [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] raise exceptions.translate_fault(task_info.error) [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Faults: ['InvalidArgument'] [ 1077.296491] env[62813]: ERROR nova.compute.manager [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] [ 1077.297482] env[62813]: DEBUG nova.compute.utils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1077.298940] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Build of instance aa76585b-55a8-437c-8dea-7731d85a3b82 was re-scheduled: A specified parameter was not correct: fileType [ 1077.298940] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1077.299399] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1077.299604] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1077.299812] env[62813]: DEBUG nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1077.300090] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1077.910178] env[62813]: DEBUG nova.network.neutron [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.924315] env[62813]: INFO nova.compute.manager [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Took 0.62 seconds to deallocate network for instance. [ 1078.041755] env[62813]: INFO nova.scheduler.client.report [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Deleted allocations for instance aa76585b-55a8-437c-8dea-7731d85a3b82 [ 1078.063807] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae015b7c-7974-4992-8e4b-820f321c9a9c tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "aa76585b-55a8-437c-8dea-7731d85a3b82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 477.041s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.063807] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd1dd117-42c0-4704-9a70-c2e2ff9c81b4 tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "aa76585b-55a8-437c-8dea-7731d85a3b82" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 274.933s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.063807] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd1dd117-42c0-4704-9a70-c2e2ff9c81b4 tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "aa76585b-55a8-437c-8dea-7731d85a3b82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.064324] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd1dd117-42c0-4704-9a70-c2e2ff9c81b4 tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "aa76585b-55a8-437c-8dea-7731d85a3b82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.064324] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd1dd117-42c0-4704-9a70-c2e2ff9c81b4 tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "aa76585b-55a8-437c-8dea-7731d85a3b82-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.066810] env[62813]: INFO nova.compute.manager [None req-bd1dd117-42c0-4704-9a70-c2e2ff9c81b4 tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Terminating instance [ 1078.069051] env[62813]: DEBUG nova.compute.manager [None req-bd1dd117-42c0-4704-9a70-c2e2ff9c81b4 tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1078.070561] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1dd117-42c0-4704-9a70-c2e2ff9c81b4 tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1078.070561] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-20e2f016-6cfe-47bc-956b-5eab866784bd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.080974] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11dcae3c-b6e1-40a0-b44f-4fd6ecc3d34b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.093323] env[62813]: DEBUG nova.compute.manager [None req-6d123555-7b20-40f5-8382-3ddc88ff4b94 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 6ed29c0d-710c-4f2f-b321-bbd8d253f918] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1078.117880] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-bd1dd117-42c0-4704-9a70-c2e2ff9c81b4 tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aa76585b-55a8-437c-8dea-7731d85a3b82 could not be found. [ 1078.118159] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1dd117-42c0-4704-9a70-c2e2ff9c81b4 tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1078.118377] env[62813]: INFO nova.compute.manager [None req-bd1dd117-42c0-4704-9a70-c2e2ff9c81b4 tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1078.118638] env[62813]: DEBUG oslo.service.loopingcall [None req-bd1dd117-42c0-4704-9a70-c2e2ff9c81b4 tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1078.118896] env[62813]: DEBUG nova.compute.manager [-] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1078.118987] env[62813]: DEBUG nova.network.neutron [-] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1078.122351] env[62813]: DEBUG nova.compute.manager [None req-6d123555-7b20-40f5-8382-3ddc88ff4b94 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 6ed29c0d-710c-4f2f-b321-bbd8d253f918] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1078.148024] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d123555-7b20-40f5-8382-3ddc88ff4b94 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "6ed29c0d-710c-4f2f-b321-bbd8d253f918" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.136s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.151169] env[62813]: DEBUG nova.network.neutron [-] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.158715] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.160472] env[62813]: DEBUG nova.compute.manager [None req-5ef90781-9b78-4ea0-85ae-a263fa0f7e96 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 90f047bb-b344-445b-906e-ca8efedf6f60] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1078.164462] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.164807] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1078.164807] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1078.165887] env[62813]: INFO nova.compute.manager [-] [instance: aa76585b-55a8-437c-8dea-7731d85a3b82] Took 0.05 seconds to deallocate network for instance. 
[ 1078.186704] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1078.186870] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1078.188123] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1078.188123] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1078.188123] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1078.188123] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1078.188123] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1078.188123] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1078.188123] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1078.188684] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1078.188807] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.188944] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.209846] env[62813]: DEBUG nova.compute.manager [None req-5ef90781-9b78-4ea0-85ae-a263fa0f7e96 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 90f047bb-b344-445b-906e-ca8efedf6f60] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1078.234647] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5ef90781-9b78-4ea0-85ae-a263fa0f7e96 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "90f047bb-b344-445b-906e-ca8efedf6f60" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 239.176s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.247727] env[62813]: DEBUG nova.compute.manager [None req-6499534e-1e11-4dd1-943d-66d3082a4b33 tempest-ServersNegativeTestJSON-1827764029 tempest-ServersNegativeTestJSON-1827764029-project-member] [instance: 419ed7f3-e302-4b62-965c-d12dc88ff2c7] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1078.301027] env[62813]: DEBUG nova.compute.manager [None req-6499534e-1e11-4dd1-943d-66d3082a4b33 tempest-ServersNegativeTestJSON-1827764029 tempest-ServersNegativeTestJSON-1827764029-project-member] [instance: 419ed7f3-e302-4b62-965c-d12dc88ff2c7] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1078.309903] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bd1dd117-42c0-4704-9a70-c2e2ff9c81b4 tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "aa76585b-55a8-437c-8dea-7731d85a3b82" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.246s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.326455] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6499534e-1e11-4dd1-943d-66d3082a4b33 tempest-ServersNegativeTestJSON-1827764029 tempest-ServersNegativeTestJSON-1827764029-project-member] Lock "419ed7f3-e302-4b62-965c-d12dc88ff2c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.295s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.338376] env[62813]: DEBUG nova.compute.manager [None req-e5ef1f17-54ae-45f9-acfe-e9672aac21ca tempest-TenantUsagesTestJSON-835618339 tempest-TenantUsagesTestJSON-835618339-project-member] [instance: 69c52191-e779-4bb4-b3aa-f39c8a70450b] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1078.370325] env[62813]: DEBUG nova.compute.manager [None req-e5ef1f17-54ae-45f9-acfe-e9672aac21ca tempest-TenantUsagesTestJSON-835618339 tempest-TenantUsagesTestJSON-835618339-project-member] [instance: 69c52191-e779-4bb4-b3aa-f39c8a70450b] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1078.397602] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e5ef1f17-54ae-45f9-acfe-e9672aac21ca tempest-TenantUsagesTestJSON-835618339 tempest-TenantUsagesTestJSON-835618339-project-member] Lock "69c52191-e779-4bb4-b3aa-f39c8a70450b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.578s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.412480] env[62813]: DEBUG nova.compute.manager [None req-2f4db042-0ec4-4d47-a6e8-2e40d8e89b23 tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] [instance: f424eed0-7af3-45e7-b451-ddd6c23871f1] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1078.440368] env[62813]: DEBUG nova.compute.manager [None req-2f4db042-0ec4-4d47-a6e8-2e40d8e89b23 tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] [instance: f424eed0-7af3-45e7-b451-ddd6c23871f1] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1078.462113] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2f4db042-0ec4-4d47-a6e8-2e40d8e89b23 tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] Lock "f424eed0-7af3-45e7-b451-ddd6c23871f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.382s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.472074] env[62813]: DEBUG nova.compute.manager [None req-3434c3de-e3c4-4064-95ae-05e434e4b08d tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] [instance: 989c0e58-b997-44c5-bc4f-759a30fbbfe3] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1078.499384] env[62813]: DEBUG nova.compute.manager [None req-3434c3de-e3c4-4064-95ae-05e434e4b08d tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] [instance: 989c0e58-b997-44c5-bc4f-759a30fbbfe3] Instance disappeared before build. 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1078.526382] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3434c3de-e3c4-4064-95ae-05e434e4b08d tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Lock "989c0e58-b997-44c5-bc4f-759a30fbbfe3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.343s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.537062] env[62813]: DEBUG nova.compute.manager [None req-ae934f32-2f05-40b0-9fc1-9945b203f114 tempest-ServerRescueTestJSONUnderV235-1256362219 tempest-ServerRescueTestJSONUnderV235-1256362219-project-member] [instance: ea2f91dc-1762-420a-90dc-c2e32811d911] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1078.561729] env[62813]: DEBUG nova.compute.manager [None req-ae934f32-2f05-40b0-9fc1-9945b203f114 tempest-ServerRescueTestJSONUnderV235-1256362219 tempest-ServerRescueTestJSONUnderV235-1256362219-project-member] [instance: ea2f91dc-1762-420a-90dc-c2e32811d911] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1078.584574] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ae934f32-2f05-40b0-9fc1-9945b203f114 tempest-ServerRescueTestJSONUnderV235-1256362219 tempest-ServerRescueTestJSONUnderV235-1256362219-project-member] Lock "ea2f91dc-1762-420a-90dc-c2e32811d911" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.199s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.594444] env[62813]: DEBUG nova.compute.manager [None req-eab368e8-3cde-4eaf-807e-231deb952ef9 tempest-ServerAddressesNegativeTestJSON-1449664058 tempest-ServerAddressesNegativeTestJSON-1449664058-project-member] [instance: a564338e-81c2-4b17-a507-081f6a57e190] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1078.618509] env[62813]: DEBUG nova.compute.manager [None req-eab368e8-3cde-4eaf-807e-231deb952ef9 tempest-ServerAddressesNegativeTestJSON-1449664058 tempest-ServerAddressesNegativeTestJSON-1449664058-project-member] [instance: a564338e-81c2-4b17-a507-081f6a57e190] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1078.640131] env[62813]: DEBUG oslo_concurrency.lockutils [None req-eab368e8-3cde-4eaf-807e-231deb952ef9 tempest-ServerAddressesNegativeTestJSON-1449664058 tempest-ServerAddressesNegativeTestJSON-1449664058-project-member] Lock "a564338e-81c2-4b17-a507-081f6a57e190" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.086s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.649116] env[62813]: DEBUG nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1078.708943] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.709221] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.710737] env[62813]: INFO nova.compute.claims [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1079.114269] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc76ed92-e8b6-47bf-b524-cfd0b70d8124 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.122957] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e060a1-2f59-497d-a4e2-24e681e00a26 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.156515] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344aa11c-ebf7-4e87-9465-197e2d788d63 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.164977] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe82be7-26a7-44d6-8684-2ce68dcdbbc3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.179504] env[62813]: DEBUG nova.compute.provider_tree [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1079.190066] env[62813]: DEBUG nova.scheduler.client.report [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1079.205561] env[62813]: DEBUG oslo_concurrency.lockutils [None 
req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.496s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.205943] env[62813]: DEBUG nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1079.240016] env[62813]: DEBUG nova.compute.utils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1079.241924] env[62813]: DEBUG nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1079.242103] env[62813]: DEBUG nova.network.neutron [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1079.250479] env[62813]: DEBUG nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1079.318534] env[62813]: DEBUG nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1079.336147] env[62813]: DEBUG nova.policy [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8eb076f2aa7246cf8da5c366366fe800', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9861046b5b014731a1b828ac24a57e87', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1079.345743] env[62813]: DEBUG nova.virt.hardware [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1079.346073] env[62813]: DEBUG nova.virt.hardware [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1079.346211] env[62813]: DEBUG nova.virt.hardware [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1079.346439] env[62813]: DEBUG nova.virt.hardware [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1079.346547] env[62813]: DEBUG nova.virt.hardware [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1079.346697] env[62813]: DEBUG nova.virt.hardware [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1079.346910] env[62813]: DEBUG nova.virt.hardware [None 
req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1079.347131] env[62813]: DEBUG nova.virt.hardware [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1079.347320] env[62813]: DEBUG nova.virt.hardware [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1079.347497] env[62813]: DEBUG nova.virt.hardware [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1079.347730] env[62813]: DEBUG nova.virt.hardware [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1079.348645] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5617fc61-f6ee-405c-a5d9-46f6d84b6127 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.357735] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a6b093-3752-46e7-9803-e5408574aab2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.751097] env[62813]: DEBUG nova.network.neutron [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Successfully created port: 13425f26-721b-4f19-b434-ee2f0da6e84e {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1079.959593] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6045ed6b-6075-4ae0-a59d-19b602c80b2f tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "b322a084-d312-45b9-90d3-11c2180c71f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.959593] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6045ed6b-6075-4ae0-a59d-19b602c80b2f tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "b322a084-d312-45b9-90d3-11c2180c71f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.984977] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6045ed6b-6075-4ae0-a59d-19b602c80b2f tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Acquiring lock "1f26c898-895c-4256-a0c4-a1596279acc5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.985207] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6045ed6b-6075-4ae0-a59d-19b602c80b2f tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "1f26c898-895c-4256-a0c4-a1596279acc5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.163748] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.179177] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.179177] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.179177] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.179289] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1080.180966] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58da829f-0e9a-4c91-94bb-a1ffc6c5b976 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.194431] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0997f186-47d4-43e0-8769-90eb52fc61a8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.211591] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5b9beb-c201-4b28-98c2-3865604ee9d5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.219822] env[62813]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569e86a5-dd05-44e6-bb22-6727b3f0f6b6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.251452] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180756MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1080.251607] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.251808] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.353655] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1080.353831] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 76b0e03d-9636-4328-bfd5-17c434cfae72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1080.353978] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 356088a2-b55e-4ff1-9422-a53ab6830fc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1080.355835] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 489b821e-f7d0-446f-8197-550c808e5a99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1080.355835] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b946bdda-a8a4-4a82-b2f7-99637fcae21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1080.355835] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1080.355835] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9b7bace-d76a-4dd8-8283-b56fd86a77a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1080.355835] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1080.355835] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1080.355835] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance eec98a4d-34f4-4313-8f9c-2fe9f483959c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1080.381210] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e9e201ea-9561-483b-a39e-6180fc6f5a2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.401023] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 287a3859-9150-414f-a4ef-2ba3af8edc8b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.416927] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 081b2d72-3954-4ce8-b8dd-6b4d6c1eea9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.437823] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d79298e6-bb55-4ba6-9a68-e460c8953c7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.451657] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e6442505-b5d0-4736-a24a-41fccda6da6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.464766] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f0d3a02d-ddb9-4338-989e-e256fb50ede5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.478441] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e261660b-b9ee-487c-b044-ce1325c8e2ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.491221] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b042b18a-4efb-431a-afa6-f4dc8b4c1bd7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.503979] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4de44f46-3872-46f4-afb4-308cc8b18c89 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.520983] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5655255a-1d03-4854-b8ad-d77643f9b9c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.527846] env[62813]: DEBUG nova.compute.manager [req-4ef47d1a-e893-4b8a-9062-f90169364ad1 req-d5b94eb8-c6e8-4f9c-9d68-150b5ca4e6d1 service nova] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Received event network-vif-plugged-13425f26-721b-4f19-b434-ee2f0da6e84e {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1080.529597] env[62813]: DEBUG oslo_concurrency.lockutils [req-4ef47d1a-e893-4b8a-9062-f90169364ad1 req-d5b94eb8-c6e8-4f9c-9d68-150b5ca4e6d1 service nova] Acquiring lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.529804] env[62813]: DEBUG oslo_concurrency.lockutils [req-4ef47d1a-e893-4b8a-9062-f90169364ad1 req-d5b94eb8-c6e8-4f9c-9d68-150b5ca4e6d1 service nova] Lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.529988] env[62813]: DEBUG oslo_concurrency.lockutils [req-4ef47d1a-e893-4b8a-9062-f90169364ad1 req-d5b94eb8-c6e8-4f9c-9d68-150b5ca4e6d1 service nova] Lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.530470] env[62813]: DEBUG nova.compute.manager [req-4ef47d1a-e893-4b8a-9062-f90169364ad1 req-d5b94eb8-c6e8-4f9c-9d68-150b5ca4e6d1 service nova] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] No waiting events found dispatching network-vif-plugged-13425f26-721b-4f19-b434-ee2f0da6e84e {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1080.530470] env[62813]: WARNING nova.compute.manager [req-4ef47d1a-e893-4b8a-9062-f90169364ad1 req-d5b94eb8-c6e8-4f9c-9d68-150b5ca4e6d1 service nova] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Received unexpected event network-vif-plugged-13425f26-721b-4f19-b434-ee2f0da6e84e for instance with vm_state building and task_state spawning. [ 1080.534982] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4e00cefc-ffe9-41e4-9520-281d937e32ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.547267] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance ab1749b4-a87b-4bd2-bdad-b2db446f44d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.561788] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 81ac869e-c8ab-4f75-bfb7-bbb3296c24dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.574451] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 6943412a-83f2-437b-80af-4a2de7ed5029 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.586556] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 3a85472c-25b9-4fb7-a438-84fa699d7f0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.604801] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance bae5a146-4946-4e03-a6f5-062e79a61def has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.616514] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b322a084-d312-45b9-90d3-11c2180c71f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.628226] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1f26c898-895c-4256-a0c4-a1596279acc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1080.628673] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1080.628778] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1080.638231] env[62813]: DEBUG nova.network.neutron [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Successfully updated port: 13425f26-721b-4f19-b434-ee2f0da6e84e {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1080.648423] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Acquiring lock "refresh_cache-eec98a4d-34f4-4313-8f9c-2fe9f483959c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.648624] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Acquired lock "refresh_cache-eec98a4d-34f4-4313-8f9c-2fe9f483959c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.648872] env[62813]: DEBUG nova.network.neutron [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1080.702891] env[62813]: DEBUG nova.network.neutron [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1080.943170] env[62813]: DEBUG nova.network.neutron [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Updating instance_info_cache with network_info: [{"id": "13425f26-721b-4f19-b434-ee2f0da6e84e", "address": "fa:16:3e:ee:49:11", "network": {"id": "e13b5bb0-4cbb-43b9-aaf6-4848a2e48352", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-931452788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9861046b5b014731a1b828ac24a57e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13425f26-72", "ovs_interfaceid": "13425f26-721b-4f19-b434-ee2f0da6e84e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.959986] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Releasing lock "refresh_cache-eec98a4d-34f4-4313-8f9c-2fe9f483959c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.960316] env[62813]: DEBUG nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Instance network_info: |[{"id": "13425f26-721b-4f19-b434-ee2f0da6e84e", "address": "fa:16:3e:ee:49:11", "network": {"id": "e13b5bb0-4cbb-43b9-aaf6-4848a2e48352", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-931452788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9861046b5b014731a1b828ac24a57e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13425f26-72", "ovs_interfaceid": "13425f26-721b-4f19-b434-ee2f0da6e84e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1080.960731] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:49:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4954661-ff70-43dd-bc60-8cbca6b9cbfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13425f26-721b-4f19-b434-ee2f0da6e84e', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1080.969601] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Creating folder: Project (9861046b5b014731a1b828ac24a57e87). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1080.970360] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-568f3daf-d659-4160-8bb0-4c248c74ecf9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.991313] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Created folder: Project (9861046b5b014731a1b828ac24a57e87) in parent group-v840812. [ 1080.991531] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Creating folder: Instances. Parent ref: group-v840876. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1080.991775] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cdcdc3cd-0524-4255-a315-cdcd698e3e15 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.000615] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Created folder: Instances in parent group-v840876. [ 1081.000914] env[62813]: DEBUG oslo.service.loopingcall [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1081.001157] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1081.001313] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9a54ab1-aed0-4a00-84b3-fe8b5a08c31a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.023921] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1081.023921] env[62813]: value = "task-4267667" [ 1081.023921] env[62813]: _type = "Task" [ 1081.023921] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.035041] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267667, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.072924] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07efe481-41ef-4ec4-82d3-faafce1a5afd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.080922] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51adc3c9-95a2-4ea5-b1f2-8e41b5703cba {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.857637] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5124178-10a3-4190-b52a-1e9fb2bcdb99 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.868568] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1732d689-6aea-42cf-8e6b-7a644ec69c69 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.872354] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267667, 'name': CreateVM_Task, 'duration_secs': 0.329989} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.872542] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1081.873656] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.873891] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.874281] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1081.874584] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-809554c7-b1fb-44d2-b22a-f566bb395805 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.884093] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for 
provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1081.888958] env[62813]: DEBUG oslo_vmware.api [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Waiting for the task: (returnval){ [ 1081.888958] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52404ee6-b8b7-d63d-8d80-8bd3dc4bee07" [ 1081.888958] env[62813]: _type = "Task" [ 1081.888958] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.893514] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1081.902441] env[62813]: DEBUG oslo_vmware.api [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52404ee6-b8b7-d63d-8d80-8bd3dc4bee07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.909674] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1081.909886] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.658s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.400042] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.400338] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1082.400553] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.579223] env[62813]: DEBUG nova.compute.manager [req-a4f5f26f-87d9-441f-a4e2-b3f0400da753 req-926f0ca2-22fd-4b73-ba68-0a5bfb206c41 service nova] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Received event network-changed-13425f26-721b-4f19-b434-ee2f0da6e84e {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1082.579461] env[62813]: DEBUG nova.compute.manager [req-a4f5f26f-87d9-441f-a4e2-b3f0400da753 req-926f0ca2-22fd-4b73-ba68-0a5bfb206c41 service nova] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Refreshing instance network info cache due to event network-changed-13425f26-721b-4f19-b434-ee2f0da6e84e. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1082.579682] env[62813]: DEBUG oslo_concurrency.lockutils [req-a4f5f26f-87d9-441f-a4e2-b3f0400da753 req-926f0ca2-22fd-4b73-ba68-0a5bfb206c41 service nova] Acquiring lock "refresh_cache-eec98a4d-34f4-4313-8f9c-2fe9f483959c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.579828] env[62813]: DEBUG oslo_concurrency.lockutils [req-a4f5f26f-87d9-441f-a4e2-b3f0400da753 req-926f0ca2-22fd-4b73-ba68-0a5bfb206c41 service nova] Acquired lock "refresh_cache-eec98a4d-34f4-4313-8f9c-2fe9f483959c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.579988] env[62813]: DEBUG nova.network.neutron [req-a4f5f26f-87d9-441f-a4e2-b3f0400da753 req-926f0ca2-22fd-4b73-ba68-0a5bfb206c41 service nova] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Refreshing network info cache for port 13425f26-721b-4f19-b434-ee2f0da6e84e {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1082.894015] env[62813]: DEBUG nova.network.neutron [req-a4f5f26f-87d9-441f-a4e2-b3f0400da753 req-926f0ca2-22fd-4b73-ba68-0a5bfb206c41 service nova] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Updated VIF entry in instance network info cache for port 13425f26-721b-4f19-b434-ee2f0da6e84e. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1082.894459] env[62813]: DEBUG nova.network.neutron [req-a4f5f26f-87d9-441f-a4e2-b3f0400da753 req-926f0ca2-22fd-4b73-ba68-0a5bfb206c41 service nova] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Updating instance_info_cache with network_info: [{"id": "13425f26-721b-4f19-b434-ee2f0da6e84e", "address": "fa:16:3e:ee:49:11", "network": {"id": "e13b5bb0-4cbb-43b9-aaf6-4848a2e48352", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-931452788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9861046b5b014731a1b828ac24a57e87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13425f26-72", "ovs_interfaceid": "13425f26-721b-4f19-b434-ee2f0da6e84e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.904196] env[62813]: DEBUG oslo_concurrency.lockutils [req-a4f5f26f-87d9-441f-a4e2-b3f0400da753 req-926f0ca2-22fd-4b73-ba68-0a5bfb206c41 service nova] Releasing lock "refresh_cache-eec98a4d-34f4-4313-8f9c-2fe9f483959c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.356583] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6519987c-9a1e-4607-b282-7c4753f4680c tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Acquiring lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.585096] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Acquiring lock "5d0e1cb1-9f54-4a76-960c-99d0803afd2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.585477] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Lock "5d0e1cb1-9f54-4a76-960c-99d0803afd2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.624494] env[62813]: WARNING oslo_vmware.rw_handles [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 
tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1125.624494] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1125.624494] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1125.624494] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1125.624494] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1125.624494] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1125.624494] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1125.624494] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1125.624494] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1125.624494] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1125.624494] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1125.624494] env[62813]: ERROR oslo_vmware.rw_handles [ 1125.625163] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/e6c22f25-5655-45eb-a337-e1251f930df0/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1125.627185] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1125.627440] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Copying Virtual Disk [datastore2] vmware_temp/e6c22f25-5655-45eb-a337-e1251f930df0/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/e6c22f25-5655-45eb-a337-e1251f930df0/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1125.627722] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5be10ede-3f25-4b87-a61f-595f92cb489b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.636134] env[62813]: DEBUG oslo_vmware.api [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Waiting for the task: (returnval){ [ 1125.636134] env[62813]: value = "task-4267668" [ 1125.636134] env[62813]: _type = "Task" [ 1125.636134] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.644726] env[62813]: DEBUG oslo_vmware.api [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Task: {'id': task-4267668, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.147208] env[62813]: DEBUG oslo_vmware.exceptions [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1126.147508] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1126.148099] env[62813]: ERROR nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1126.148099] env[62813]: Faults: ['InvalidArgument'] [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Traceback (most recent call last): [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] yield resources [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] self.driver.spawn(context, instance, image_meta, [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] self._fetch_image_if_missing(context, vi) [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] image_cache(vi, tmp_image_ds_loc) [ 1126.148099] env[62813]: ERROR 
nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] vm_util.copy_virtual_disk( [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] session._wait_for_task(vmdk_copy_task) [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] return self.wait_for_task(task_ref) [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] return evt.wait() [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] result = hub.switch() [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] return self.greenlet.switch() [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] self.f(*self.args, **self.kw) [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] raise exceptions.translate_fault(task_info.error) [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Faults: ['InvalidArgument'] [ 1126.148099] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] [ 1126.149118] env[62813]: INFO nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Terminating instance [ 1126.150121] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 
tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.150331] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1126.150572] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3424a78-aa9f-4b32-b92e-c1b7af03e4f0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.153119] env[62813]: DEBUG nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1126.153311] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1126.154075] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26eff758-d340-428f-9564-d6e9882ce00e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.162163] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1126.163224] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7195991-ac8c-4c0b-9b00-f48bea4ed8ea {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.164726] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1126.164902] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1126.165584] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c3bb613-2b2a-4c46-8813-a149e5f5a3ee {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.170974] env[62813]: DEBUG oslo_vmware.api [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Waiting for the task: (returnval){ [ 1126.170974] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52a63821-3e1b-58ec-b49f-113d98c8f025" [ 1126.170974] env[62813]: _type = "Task" [ 1126.170974] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.179301] env[62813]: DEBUG oslo_vmware.api [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52a63821-3e1b-58ec-b49f-113d98c8f025, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.238768] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1126.238992] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1126.239193] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Deleting the datastore file [datastore2] 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1126.239466] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-398a4586-8239-4c81-b05d-ce45eb8b8bf7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.245892] env[62813]: DEBUG oslo_vmware.api [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Waiting for the task: (returnval){ [ 1126.245892] env[62813]: value = "task-4267670" [ 1126.245892] env[62813]: _type = "Task" [ 1126.245892] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.253732] env[62813]: DEBUG oslo_vmware.api [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Task: {'id': task-4267670, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.682537] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1126.682966] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Creating directory with path [datastore2] vmware_temp/aae01a93-2834-4f80-b5cc-d6b582f65d7e/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1126.683386] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec6343a4-aeb6-42ce-b1e4-1befce24f0a2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.697176] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Created directory with path [datastore2] vmware_temp/aae01a93-2834-4f80-b5cc-d6b582f65d7e/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1126.697438] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Fetch image to [datastore2] vmware_temp/aae01a93-2834-4f80-b5cc-d6b582f65d7e/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1126.697654] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/aae01a93-2834-4f80-b5cc-d6b582f65d7e/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1126.698617] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71520f61-14ea-4445-adaf-b85fffdfe714 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.707422] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac96b199-7ce2-4316-bc2c-fb739a207fbb {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.718541] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf57ddd-4eaa-4a68-972b-1b130e4cb913 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.759765] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d05b27e-3a0c-4778-b845-9647c73e936a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.766654] env[62813]: DEBUG oslo_vmware.api [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Task: {'id': task-4267670, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081967} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.768238] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1126.768436] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1126.768609] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1126.769485] env[62813]: INFO nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Took 0.62 seconds to destroy the instance on the hypervisor. 
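
Editor's note: the records above repeat one pattern for CopyVirtualDisk_Task and DeleteDatastoreFile_Task alike: the vSphere call returns a task reference immediately, the caller blocks in wait_for_task, and a polling loop reports "progress is 0%." until the task either completes ("completed successfully") or raises a translated fault (the InvalidArgument: fileType failure above). The sketch below illustrates only that poll loop; get_task_info and TaskFailed are hypothetical stand-ins for the oslo.vmware internals, not the library's actual API.

    import time

    class TaskFailed(Exception):
        """Stand-in for a translated vSphere fault (e.g. InvalidArgument)."""

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        # get_task_info(task_ref) is a hypothetical callable returning an
        # object with .state ('running' | 'success' | 'error'), .progress
        # and .error, mirroring what the vSphere TaskInfo object exposes.
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                return info          # e.g. CreateVM_Task completed successfully
            if info.state == "error":
                # the "Fault InvalidArgument not matched" step in the log is
                # this mapping of the raw fault to a Python exception
                raise TaskFailed(info.error)
            # still running: this is where the "progress is 0%." lines come from
            time.sleep(poll_interval)

In the traceback above, this is the point at which _poll_task raises and the exception propagates up through copy_virtual_disk and spawn as VimFaultException.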
[ 1126.770940] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6847bae9-ec8c-4c55-9fcd-0bdfbee78e89 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.773267] env[62813]: DEBUG nova.compute.claims [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1126.773445] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.773666] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.800933] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1126.881853] env[62813]: DEBUG oslo_vmware.rw_handles [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/aae01a93-2834-4f80-b5cc-d6b582f65d7e/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1126.944038] env[62813]: DEBUG oslo_vmware.rw_handles [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1126.944225] env[62813]: DEBUG oslo_vmware.rw_handles [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/aae01a93-2834-4f80-b5cc-d6b582f65d7e/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1127.265959] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e7c599-4e62-4a64-ae3c-e6445f3b9b63 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.274587] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad1543a-09c3-4aa3-a691-41c771d67eed {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.305797] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892806b8-da21-47a8-8cad-5af61aa06ede {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.313943] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e191c5b0-255a-4277-b2c4-9bc3956b5e65 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.327796] env[62813]: DEBUG nova.compute.provider_tree [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.338518] env[62813]: DEBUG nova.scheduler.client.report [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1127.359971] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.586s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.360548] env[62813]: ERROR nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1127.360548] env[62813]: Faults: ['InvalidArgument'] [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Traceback (most recent call last): [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, 
in _build_and_run_instance [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] self.driver.spawn(context, instance, image_meta, [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] self._fetch_image_if_missing(context, vi) [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] image_cache(vi, tmp_image_ds_loc) [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] vm_util.copy_virtual_disk( [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] session._wait_for_task(vmdk_copy_task) [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] return self.wait_for_task(task_ref) [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] return evt.wait() [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] result = hub.switch() [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] return self.greenlet.switch() [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] self.f(*self.args, **self.kw) [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 
4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] raise exceptions.translate_fault(task_info.error) [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Faults: ['InvalidArgument'] [ 1127.360548] env[62813]: ERROR nova.compute.manager [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] [ 1127.361475] env[62813]: DEBUG nova.compute.utils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1127.362998] env[62813]: DEBUG nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Build of instance 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0 was re-scheduled: A specified parameter was not correct: fileType [ 1127.362998] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1127.363527] env[62813]: DEBUG nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1127.363730] env[62813]: DEBUG nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1127.363909] env[62813]: DEBUG nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1127.364088] env[62813]: DEBUG nova.network.neutron [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1128.045756] env[62813]: DEBUG nova.network.neutron [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.079229] env[62813]: INFO nova.compute.manager [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Took 0.71 seconds to deallocate network for instance. [ 1128.265451] env[62813]: INFO nova.scheduler.client.report [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Deleted allocations for instance 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0 [ 1128.314527] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7c7f2812-300d-4f22-b8e6-ca0def8d926a tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Lock "4357ed8f-14f7-4d44-8d1b-262ccaafe1e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 526.483s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.315661] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5baa827c-9540-44ed-a6b3-ee557fe3d672 tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Lock "4357ed8f-14f7-4d44-8d1b-262ccaafe1e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 328.673s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.315878] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5baa827c-9540-44ed-a6b3-ee557fe3d672 tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Acquiring lock "4357ed8f-14f7-4d44-8d1b-262ccaafe1e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.316099] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5baa827c-9540-44ed-a6b3-ee557fe3d672 tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Lock 
"4357ed8f-14f7-4d44-8d1b-262ccaafe1e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.316270] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5baa827c-9540-44ed-a6b3-ee557fe3d672 tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Lock "4357ed8f-14f7-4d44-8d1b-262ccaafe1e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.318318] env[62813]: INFO nova.compute.manager [None req-5baa827c-9540-44ed-a6b3-ee557fe3d672 tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Terminating instance [ 1128.320061] env[62813]: DEBUG nova.compute.manager [None req-5baa827c-9540-44ed-a6b3-ee557fe3d672 tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1128.320278] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa827c-9540-44ed-a6b3-ee557fe3d672 tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1128.320756] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd1637bf-fbc9-447d-ac97-352b77e66814 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.333017] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6fe113-98c2-4303-b94e-ceb5d4afa394 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.356029] env[62813]: DEBUG nova.compute.manager [None req-c50224e8-5bc1-465f-a52f-f33fcfe4adc0 tempest-ListImageFiltersTestJSON-2088619778 tempest-ListImageFiltersTestJSON-2088619778-project-member] [instance: e9e201ea-9561-483b-a39e-6180fc6f5a2a] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1128.376972] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-5baa827c-9540-44ed-a6b3-ee557fe3d672 tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0 could not be found. 
[ 1128.377223] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5baa827c-9540-44ed-a6b3-ee557fe3d672 tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1128.377406] env[62813]: INFO nova.compute.manager [None req-5baa827c-9540-44ed-a6b3-ee557fe3d672 tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1128.377662] env[62813]: DEBUG oslo.service.loopingcall [None req-5baa827c-9540-44ed-a6b3-ee557fe3d672 tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1128.377900] env[62813]: DEBUG nova.compute.manager [-] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1128.377997] env[62813]: DEBUG nova.network.neutron [-] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1128.413331] env[62813]: DEBUG nova.compute.manager [None req-c50224e8-5bc1-465f-a52f-f33fcfe4adc0 tempest-ListImageFiltersTestJSON-2088619778 tempest-ListImageFiltersTestJSON-2088619778-project-member] [instance: e9e201ea-9561-483b-a39e-6180fc6f5a2a] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1128.416771] env[62813]: DEBUG nova.network.neutron [-] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.428362] env[62813]: INFO nova.compute.manager [-] [instance: 4357ed8f-14f7-4d44-8d1b-262ccaafe1e0] Took 0.05 seconds to deallocate network for instance. [ 1128.449985] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c50224e8-5bc1-465f-a52f-f33fcfe4adc0 tempest-ListImageFiltersTestJSON-2088619778 tempest-ListImageFiltersTestJSON-2088619778-project-member] Lock "e9e201ea-9561-483b-a39e-6180fc6f5a2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.422s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.460311] env[62813]: DEBUG nova.compute.manager [None req-de566c29-5c77-4c6f-aa2b-419069f5eaa9 tempest-ListImageFiltersTestJSON-2088619778 tempest-ListImageFiltersTestJSON-2088619778-project-member] [instance: 287a3859-9150-414f-a4ef-2ba3af8edc8b] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1128.489461] env[62813]: DEBUG nova.compute.manager [None req-de566c29-5c77-4c6f-aa2b-419069f5eaa9 tempest-ListImageFiltersTestJSON-2088619778 tempest-ListImageFiltersTestJSON-2088619778-project-member] [instance: 287a3859-9150-414f-a4ef-2ba3af8edc8b] Instance disappeared before build. 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1128.520066] env[62813]: DEBUG oslo_concurrency.lockutils [None req-de566c29-5c77-4c6f-aa2b-419069f5eaa9 tempest-ListImageFiltersTestJSON-2088619778 tempest-ListImageFiltersTestJSON-2088619778-project-member] Lock "287a3859-9150-414f-a4ef-2ba3af8edc8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.788s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.531778] env[62813]: DEBUG nova.compute.manager [None req-4c5a96f3-ab92-4b2f-99ba-b662823f7abb tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 081b2d72-3954-4ce8-b8dd-6b4d6c1eea9d] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1128.560717] env[62813]: DEBUG nova.compute.manager [None req-4c5a96f3-ab92-4b2f-99ba-b662823f7abb tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 081b2d72-3954-4ce8-b8dd-6b4d6c1eea9d] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1128.608307] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4c5a96f3-ab92-4b2f-99ba-b662823f7abb tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Lock "081b2d72-3954-4ce8-b8dd-6b4d6c1eea9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.736s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.613812] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5baa827c-9540-44ed-a6b3-ee557fe3d672 tempest-ServerMetadataNegativeTestJSON-488446921 tempest-ServerMetadataNegativeTestJSON-488446921-project-member] Lock "4357ed8f-14f7-4d44-8d1b-262ccaafe1e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.298s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.622723] env[62813]: DEBUG nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1128.745216] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.745490] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.747194] env[62813]: INFO nova.compute.claims [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1129.138782] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf49389-157f-495b-8600-c7c3352b09aa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.147283] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdc01f8-d6d5-43f5-9376-51dae83c8846 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.180757] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f134a2c4-7b96-4872-b8f0-8d6476d233cc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.189829] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25da9910-2c31-4668-84b5-28f314fa3d35 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.205530] env[62813]: DEBUG nova.compute.provider_tree [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.217617] env[62813]: DEBUG nova.scheduler.client.report [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1129.243158] env[62813]: DEBUG oslo_concurrency.lockutils [None 
req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.497s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.243677] env[62813]: DEBUG nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1129.295870] env[62813]: DEBUG nova.compute.utils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1129.297691] env[62813]: DEBUG nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1129.297691] env[62813]: DEBUG nova.network.neutron [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1129.320701] env[62813]: DEBUG nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1129.377888] env[62813]: DEBUG nova.policy [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8825dc7a3d6c457883432fb38cc0a83b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '09c8790d652a464fb2622aec202522ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1129.394301] env[62813]: DEBUG nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1129.437135] env[62813]: DEBUG nova.virt.hardware [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1129.437392] env[62813]: DEBUG nova.virt.hardware [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1129.437559] env[62813]: DEBUG nova.virt.hardware [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1129.437749] env[62813]: DEBUG nova.virt.hardware [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1129.437901] env[62813]: DEBUG nova.virt.hardware [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1129.438161] env[62813]: DEBUG nova.virt.hardware [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1129.438412] env[62813]: DEBUG nova.virt.hardware [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1129.438577] env[62813]: DEBUG nova.virt.hardware [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1129.438755] env[62813]: DEBUG nova.virt.hardware [None 
req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1129.438913] env[62813]: DEBUG nova.virt.hardware [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1129.439128] env[62813]: DEBUG nova.virt.hardware [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1129.440014] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4109b036-c6c4-4dc5-bf16-8c9f33d5ed4b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.448911] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054f177d-ef92-4d4d-a3d5-5870cbd43dbd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.122062] env[62813]: DEBUG nova.network.neutron [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Successfully created port: 4acf092b-dce0-48f7-aa2b-390f01d7718b {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1130.164088] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.164088] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Cleaning up deleted instances {{(pid=62813) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 1130.189250] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] There are 0 instances to clean {{(pid=62813) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 1130.659609] env[62813]: DEBUG nova.network.neutron [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Successfully created port: 0c4ad3fb-b7ca-4a0d-bada-126d4f591c38 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1131.641049] env[62813]: DEBUG nova.compute.manager [req-6d98c7fe-d36f-4ce2-a35b-7380af76907e req-0808fa78-e150-4466-bfe8-6b583d9ad858 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Received event network-vif-plugged-4acf092b-dce0-48f7-aa2b-390f01d7718b {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1131.641340] env[62813]: DEBUG oslo_concurrency.lockutils [req-6d98c7fe-d36f-4ce2-a35b-7380af76907e req-0808fa78-e150-4466-bfe8-6b583d9ad858 service nova] Acquiring lock 
"d79298e6-bb55-4ba6-9a68-e460c8953c7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.641515] env[62813]: DEBUG oslo_concurrency.lockutils [req-6d98c7fe-d36f-4ce2-a35b-7380af76907e req-0808fa78-e150-4466-bfe8-6b583d9ad858 service nova] Lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.642313] env[62813]: DEBUG oslo_concurrency.lockutils [req-6d98c7fe-d36f-4ce2-a35b-7380af76907e req-0808fa78-e150-4466-bfe8-6b583d9ad858 service nova] Lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.642528] env[62813]: DEBUG nova.compute.manager [req-6d98c7fe-d36f-4ce2-a35b-7380af76907e req-0808fa78-e150-4466-bfe8-6b583d9ad858 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] No waiting events found dispatching network-vif-plugged-4acf092b-dce0-48f7-aa2b-390f01d7718b {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1131.642767] env[62813]: WARNING nova.compute.manager [req-6d98c7fe-d36f-4ce2-a35b-7380af76907e req-0808fa78-e150-4466-bfe8-6b583d9ad858 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Received unexpected event network-vif-plugged-4acf092b-dce0-48f7-aa2b-390f01d7718b for instance with vm_state building and task_state spawning. [ 1131.660336] env[62813]: DEBUG nova.network.neutron [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Successfully updated port: 4acf092b-dce0-48f7-aa2b-390f01d7718b {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1132.164391] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.164568] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Cleaning up deleted instances with incomplete migration {{(pid=62813) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 1132.676532] env[62813]: DEBUG nova.network.neutron [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Successfully updated port: 0c4ad3fb-b7ca-4a0d-bada-126d4f591c38 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1132.701300] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "refresh_cache-d79298e6-bb55-4ba6-9a68-e460c8953c7a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1132.701467] env[62813]: DEBUG oslo_concurrency.lockutils [None 
req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquired lock "refresh_cache-d79298e6-bb55-4ba6-9a68-e460c8953c7a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.701659] env[62813]: DEBUG nova.network.neutron [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1132.961374] env[62813]: DEBUG nova.network.neutron [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1133.453824] env[62813]: DEBUG nova.network.neutron [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Updating instance_info_cache with network_info: [{"id": "4acf092b-dce0-48f7-aa2b-390f01d7718b", "address": "fa:16:3e:47:c8:eb", "network": {"id": "faed3d45-8feb-4953-90ca-abc189f7d25d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-674369454", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4acf092b-dc", "ovs_interfaceid": "4acf092b-dce0-48f7-aa2b-390f01d7718b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0c4ad3fb-b7ca-4a0d-bada-126d4f591c38", "address": "fa:16:3e:72:d2:92", "network": {"id": "f5a0a6d6-042f-4b9b-a19a-80db2fc69909", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2072666839", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c4ad3fb-b7", "ovs_interfaceid": "0c4ad3fb-b7ca-4a0d-bada-126d4f591c38", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.478810] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Releasing lock "refresh_cache-d79298e6-bb55-4ba6-9a68-e460c8953c7a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.479184] env[62813]: DEBUG nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Instance network_info: |[{"id": "4acf092b-dce0-48f7-aa2b-390f01d7718b", "address": "fa:16:3e:47:c8:eb", "network": {"id": "faed3d45-8feb-4953-90ca-abc189f7d25d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-674369454", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4acf092b-dc", "ovs_interfaceid": "4acf092b-dce0-48f7-aa2b-390f01d7718b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0c4ad3fb-b7ca-4a0d-bada-126d4f591c38", "address": "fa:16:3e:72:d2:92", "network": {"id": "f5a0a6d6-042f-4b9b-a19a-80db2fc69909", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2072666839", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c4ad3fb-b7", "ovs_interfaceid": "0c4ad3fb-b7ca-4a0d-bada-126d4f591c38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1133.479751] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Instance VIF 
info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:c8:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '93c5b7ce-4c84-40bc-884c-b2453e0eee69', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4acf092b-dce0-48f7-aa2b-390f01d7718b', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:d2:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8f580e6-1d86-41ee-9ebe-c531cb9299c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0c4ad3fb-b7ca-4a0d-bada-126d4f591c38', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1133.489294] env[62813]: DEBUG oslo.service.loopingcall [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1133.490211] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1133.490453] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fdbf377-8173-4d42-a423-b4968b20e081 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.513792] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1133.513792] env[62813]: value = "task-4267671" [ 1133.513792] env[62813]: _type = "Task" [ 1133.513792] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.521953] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267671, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.668307] env[62813]: DEBUG nova.compute.manager [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Received event network-changed-4acf092b-dce0-48f7-aa2b-390f01d7718b {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1133.668307] env[62813]: DEBUG nova.compute.manager [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Refreshing instance network info cache due to event network-changed-4acf092b-dce0-48f7-aa2b-390f01d7718b. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1133.668544] env[62813]: DEBUG oslo_concurrency.lockutils [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] Acquiring lock "refresh_cache-d79298e6-bb55-4ba6-9a68-e460c8953c7a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.668613] env[62813]: DEBUG oslo_concurrency.lockutils [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] Acquired lock "refresh_cache-d79298e6-bb55-4ba6-9a68-e460c8953c7a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.668818] env[62813]: DEBUG nova.network.neutron [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Refreshing network info cache for port 4acf092b-dce0-48f7-aa2b-390f01d7718b {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1133.985651] env[62813]: DEBUG nova.network.neutron [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Updated VIF entry in instance network info cache for port 4acf092b-dce0-48f7-aa2b-390f01d7718b. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1133.986083] env[62813]: DEBUG nova.network.neutron [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Updating instance_info_cache with network_info: [{"id": "4acf092b-dce0-48f7-aa2b-390f01d7718b", "address": "fa:16:3e:47:c8:eb", "network": {"id": "faed3d45-8feb-4953-90ca-abc189f7d25d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-674369454", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4acf092b-dc", "ovs_interfaceid": "4acf092b-dce0-48f7-aa2b-390f01d7718b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0c4ad3fb-b7ca-4a0d-bada-126d4f591c38", "address": "fa:16:3e:72:d2:92", "network": {"id": "f5a0a6d6-042f-4b9b-a19a-80db2fc69909", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2072666839", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c4ad3fb-b7", "ovs_interfaceid": "0c4ad3fb-b7ca-4a0d-bada-126d4f591c38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.998300] env[62813]: DEBUG oslo_concurrency.lockutils [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] Releasing lock "refresh_cache-d79298e6-bb55-4ba6-9a68-e460c8953c7a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.998543] env[62813]: DEBUG nova.compute.manager [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Received event network-vif-plugged-0c4ad3fb-b7ca-4a0d-bada-126d4f591c38 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1133.998740] env[62813]: DEBUG oslo_concurrency.lockutils [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] Acquiring lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.999471] env[62813]: DEBUG oslo_concurrency.lockutils [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] Lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.999471] env[62813]: DEBUG oslo_concurrency.lockutils [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] Lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.999471] env[62813]: DEBUG nova.compute.manager [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] No waiting events found dispatching network-vif-plugged-0c4ad3fb-b7ca-4a0d-bada-126d4f591c38 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1133.999471] env[62813]: WARNING nova.compute.manager [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Received unexpected event network-vif-plugged-0c4ad3fb-b7ca-4a0d-bada-126d4f591c38 for instance with vm_state building and task_state spawning. 
[ 1133.999660] env[62813]: DEBUG nova.compute.manager [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Received event network-changed-0c4ad3fb-b7ca-4a0d-bada-126d4f591c38 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1133.999847] env[62813]: DEBUG nova.compute.manager [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Refreshing instance network info cache due to event network-changed-0c4ad3fb-b7ca-4a0d-bada-126d4f591c38. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1134.000084] env[62813]: DEBUG oslo_concurrency.lockutils [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] Acquiring lock "refresh_cache-d79298e6-bb55-4ba6-9a68-e460c8953c7a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.000269] env[62813]: DEBUG oslo_concurrency.lockutils [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] Acquired lock "refresh_cache-d79298e6-bb55-4ba6-9a68-e460c8953c7a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.000440] env[62813]: DEBUG nova.network.neutron [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Refreshing network info cache for port 0c4ad3fb-b7ca-4a0d-bada-126d4f591c38 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1134.023708] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267671, 'name': CreateVM_Task, 'duration_secs': 0.407997} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.023806] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1134.024545] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.024697] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.025022] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1134.025275] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e12ec4be-f8c5-4636-8e43-bfcdddc8d492 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.030381] env[62813]: DEBUG oslo_vmware.api [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Waiting for the task: (returnval){ [ 1134.030381] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52b737bb-c45b-dca3-4fda-7c79320f3a7d" [ 1134.030381] env[62813]: _type = "Task" [ 1134.030381] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.038584] env[62813]: DEBUG oslo_vmware.api [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52b737bb-c45b-dca3-4fda-7c79320f3a7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.179408] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.179707] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.179883] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1134.337390] env[62813]: DEBUG nova.network.neutron [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Updated VIF entry in instance network info cache for port 0c4ad3fb-b7ca-4a0d-bada-126d4f591c38. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1134.337951] env[62813]: DEBUG nova.network.neutron [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Updating instance_info_cache with network_info: [{"id": "4acf092b-dce0-48f7-aa2b-390f01d7718b", "address": "fa:16:3e:47:c8:eb", "network": {"id": "faed3d45-8feb-4953-90ca-abc189f7d25d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-674369454", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4acf092b-dc", "ovs_interfaceid": "4acf092b-dce0-48f7-aa2b-390f01d7718b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0c4ad3fb-b7ca-4a0d-bada-126d4f591c38", "address": "fa:16:3e:72:d2:92", "network": {"id": "f5a0a6d6-042f-4b9b-a19a-80db2fc69909", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2072666839", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.216", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "09c8790d652a464fb2622aec202522ea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c4ad3fb-b7", "ovs_interfaceid": "0c4ad3fb-b7ca-4a0d-bada-126d4f591c38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.347697] env[62813]: DEBUG oslo_concurrency.lockutils [req-631ca40c-7b58-4f12-970f-3fada95f160f req-936242ea-1856-4184-8a95-d20e6f572762 service nova] Releasing lock "refresh_cache-d79298e6-bb55-4ba6-9a68-e460c8953c7a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1134.541977] env[62813]: DEBUG oslo_concurrency.lockutils [None 
req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1134.542243] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1134.542462] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.685923] env[62813]: DEBUG oslo_concurrency.lockutils [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.159123] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1138.164044] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1138.164168] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1138.164304] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.174784] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.175043] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1140.175169] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:9955}} [ 1140.203469] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1140.203669] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1140.203853] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1140.204051] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1140.204157] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1140.204287] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1140.204410] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1140.204530] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1140.204696] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1140.204823] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1140.204947] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1140.205508] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.206181] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.582034] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.582230] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.163429] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.201137] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.216025] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.216025] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.216025] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.216207] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1141.217221] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8e88e3-70cf-4951-a73d-759da0fc2e71 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.227616] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f81fda-6f8d-44e5-b1c6-ffa62785caf8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.242254] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5df0c0-0478-47a7-9574-832057a5211a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.249256] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29cd6362-1c1f-4ea9-aa46-b8050ddecc3f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.280172] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180783MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1141.280354] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.280559] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.568102] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 76b0e03d-9636-4328-bfd5-17c434cfae72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.568234] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 356088a2-b55e-4ff1-9422-a53ab6830fc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.568302] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 489b821e-f7d0-446f-8197-550c808e5a99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.568432] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b946bdda-a8a4-4a82-b2f7-99637fcae21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.568558] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.568681] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9b7bace-d76a-4dd8-8283-b56fd86a77a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.568822] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.568987] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.569140] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance eec98a4d-34f4-4313-8f9c-2fe9f483959c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.569262] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d79298e6-bb55-4ba6-9a68-e460c8953c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.581630] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e6442505-b5d0-4736-a24a-41fccda6da6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.592770] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f0d3a02d-ddb9-4338-989e-e256fb50ede5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.604095] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e261660b-b9ee-487c-b044-ce1325c8e2ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.615188] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b042b18a-4efb-431a-afa6-f4dc8b4c1bd7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.629765] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4de44f46-3872-46f4-afb4-308cc8b18c89 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.667367] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5655255a-1d03-4854-b8ad-d77643f9b9c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.680330] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4e00cefc-ffe9-41e4-9520-281d937e32ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.691649] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance ab1749b4-a87b-4bd2-bdad-b2db446f44d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.702688] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 81ac869e-c8ab-4f75-bfb7-bbb3296c24dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.714865] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 6943412a-83f2-437b-80af-4a2de7ed5029 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.726919] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 3a85472c-25b9-4fb7-a438-84fa699d7f0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.738888] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance bae5a146-4946-4e03-a6f5-062e79a61def has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.751056] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b322a084-d312-45b9-90d3-11c2180c71f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.761394] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1f26c898-895c-4256-a0c4-a1596279acc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.772155] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.783330] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1141.783638] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1141.783721] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1141.807590] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing inventories for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1141.824787] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Updating ProviderTree inventory for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1141.824993] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Updating inventory in ProviderTree for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1141.836790] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing aggregate associations for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2, aggregates: None {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1141.868211] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing trait associations for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2, traits: 
COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1142.207369] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72853cd6-0067-4e6e-9d7a-d3167f6cbe0e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.215597] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb14b0b-ebb1-48b2-a9ce-99aba1cc6f9d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.245811] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00285a8-0179-4774-8ccf-72b69dc49fc9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.253676] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74146b9d-ef64-4189-a365-209097eb57df {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.266845] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1142.276269] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1142.298778] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1142.298966] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.018s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1148.639973] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.676156] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Getting list of instances from cluster (obj){ [ 1148.676156] env[62813]: value = "domain-c8" [ 1148.676156] env[62813]: _type = "ClusterComputeResource" [ 1148.676156] env[62813]: } {{(pid=62813) list_instances 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1148.676650] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b886fe42-ff81-48b6-acb2-591290fbf74a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.693806] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Got total of 10 instances {{(pid=62813) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1148.693979] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 76b0e03d-9636-4328-bfd5-17c434cfae72 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1148.694191] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 356088a2-b55e-4ff1-9422-a53ab6830fc9 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1148.694352] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 489b821e-f7d0-446f-8197-550c808e5a99 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1148.694505] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid b946bdda-a8a4-4a82-b2f7-99637fcae21c {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1148.694656] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1148.694807] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid c9b7bace-d76a-4dd8-8283-b56fd86a77a4 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1148.694961] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 1d8d7576-935b-4f51-8475-fe09aad4ea7c {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1148.695269] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid a3cd73ab-b0e7-43f8-bba7-8539f89a1787 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1148.695445] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid eec98a4d-34f4-4313-8f9c-2fe9f483959c {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1148.695597] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid d79298e6-bb55-4ba6-9a68-e460c8953c7a {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1148.695929] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "76b0e03d-9636-4328-bfd5-17c434cfae72" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.696188] env[62813]: DEBUG oslo_concurrency.lockutils [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "356088a2-b55e-4ff1-9422-a53ab6830fc9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.696391] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "489b821e-f7d0-446f-8197-550c808e5a99" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.696583] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.696776] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.696979] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.697188] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.697379] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.697569] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.698620] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.641564] env[62813]: WARNING oslo_vmware.rw_handles [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 
tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1175.641564] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1175.641564] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1175.641564] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1175.641564] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1175.641564] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1175.641564] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1175.641564] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1175.641564] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1175.641564] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1175.641564] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1175.641564] env[62813]: ERROR oslo_vmware.rw_handles [ 1175.642405] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/aae01a93-2834-4f80-b5cc-d6b582f65d7e/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1175.644626] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1175.645014] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Copying Virtual Disk [datastore2] vmware_temp/aae01a93-2834-4f80-b5cc-d6b582f65d7e/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/aae01a93-2834-4f80-b5cc-d6b582f65d7e/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1175.645457] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2a6f75f-088f-48af-8e0b-e2123f9d751f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.655410] env[62813]: DEBUG oslo_vmware.api [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Waiting for the task: (returnval){ [ 1175.655410] env[62813]: value = "task-4267672" [ 1175.655410] env[62813]: _type = "Task" [ 1175.655410] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.664793] env[62813]: DEBUG oslo_vmware.api [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Task: {'id': task-4267672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.166674] env[62813]: DEBUG oslo_vmware.exceptions [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1176.167186] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1176.167821] env[62813]: ERROR nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1176.167821] env[62813]: Faults: ['InvalidArgument'] [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Traceback (most recent call last): [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] yield resources [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] self.driver.spawn(context, instance, image_meta, [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] self._fetch_image_if_missing(context, vi) [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] image_cache(vi, tmp_image_ds_loc) [ 1176.167821] 
env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] vm_util.copy_virtual_disk( [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] session._wait_for_task(vmdk_copy_task) [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] return self.wait_for_task(task_ref) [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] return evt.wait() [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] result = hub.switch() [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] return self.greenlet.switch() [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] self.f(*self.args, **self.kw) [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] raise exceptions.translate_fault(task_info.error) [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Faults: ['InvalidArgument'] [ 1176.167821] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] [ 1176.169556] env[62813]: INFO nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Terminating instance [ 1176.170395] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 
tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.170602] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1176.170861] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5bae0f3-dc25-4ce8-b94f-2207dfd076db {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.173233] env[62813]: DEBUG nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1176.173442] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1176.174237] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28b3d8a-bf86-421f-94a0-51908bfad739 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.181982] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1176.182270] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4bf9be93-0d99-4838-be66-4e4b4166c2aa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.184786] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1176.184965] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1176.185997] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c69c4ad8-767c-4677-95fd-e33db2ffa4d9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.191567] env[62813]: DEBUG oslo_vmware.api [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Waiting for the task: (returnval){ [ 1176.191567] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52f0d7ba-94a7-960a-ee80-e5383f2c958d" [ 1176.191567] env[62813]: _type = "Task" [ 1176.191567] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.200134] env[62813]: DEBUG oslo_vmware.api [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52f0d7ba-94a7-960a-ee80-e5383f2c958d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.261114] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1176.261402] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1176.261615] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Deleting the datastore file [datastore2] 76b0e03d-9636-4328-bfd5-17c434cfae72 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1176.261925] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7744e8ed-7bd0-4b1e-8cae-82f4fb16adf5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.269608] env[62813]: DEBUG oslo_vmware.api [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Waiting for the task: (returnval){ [ 1176.269608] env[62813]: value = "task-4267674" [ 1176.269608] env[62813]: _type = "Task" [ 1176.269608] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.278714] env[62813]: DEBUG oslo_vmware.api [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Task: {'id': task-4267674, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.702700] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1176.703066] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Creating directory with path [datastore2] vmware_temp/308f29f4-bcb9-48bd-82b8-1fead83d8b6f/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1176.703290] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d220d22b-1584-4bcd-953a-767f8e334ffd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.715657] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Created directory with path [datastore2] vmware_temp/308f29f4-bcb9-48bd-82b8-1fead83d8b6f/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1176.715870] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Fetch image to [datastore2] vmware_temp/308f29f4-bcb9-48bd-82b8-1fead83d8b6f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1176.716070] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/308f29f4-bcb9-48bd-82b8-1fead83d8b6f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1176.716899] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d920477-9d8d-4cb6-b052-b091576dec4c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.725099] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f9cc56-0b6a-49fa-b28f-8a9b7db3d940 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.735425] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f437e4a-07ca-4191-b138-2eeeed45de22 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.766883] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d880c2-18d0-43dc-87b3-b7aa1dd9df93 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.775821] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0199ae92-91a4-41b4-be4e-2b741c653761 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.780145] env[62813]: DEBUG oslo_vmware.api [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Task: {'id': task-4267674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069458} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.780695] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1176.780883] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1176.781067] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1176.781243] env[62813]: INFO nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Took 0.61 seconds to destroy the instance on the hypervisor. 
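For reference, the resource audit recorded above (entries 1141.280172 through 1141.783721) is arithmetically consistent: ten instances are actively managed, each holding an allocation of {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, and the MEMORY_MB inventory reserves 512 MB, so used_ram = 512 + 10 x 128 = 1792 MB, used_disk = 10 x 1 GB = 10 GB, and used_vcpus = 10, exactly as reported in the final resource view. The snippet below is a minimal sketch of that roll-up under this simplified data layout; it is not Nova's ResourceTracker code, and the helper name is an illustrative assumption.

# Minimal sketch of the usage roll-up reflected in the audit entries above.
# Not Nova's ResourceTracker; names and data layout are illustrative assumptions.
RESERVED_MEMORY_MB = 512  # matches 'reserved' in the MEMORY_MB inventory

# Ten tracked instances, each with the allocation shown in the audit entries.
allocations = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 10

def summarize(allocs, reserved_mb=RESERVED_MEMORY_MB):
    """Roll per-instance allocations up into the final-resource-view totals."""
    used_ram = reserved_mb + sum(a['MEMORY_MB'] for a in allocs)
    used_disk = sum(a['DISK_GB'] for a in allocs)
    used_vcpus = sum(a['VCPU'] for a in allocs)
    return used_ram, used_disk, used_vcpus

used_ram, used_disk, used_vcpus = summarize(allocations)
# Expected output: used_ram=1792MB used_disk=10GB used_vcpus=10 (cf. entry 1141.783721)
print(f"used_ram={used_ram}MB used_disk={used_disk}GB used_vcpus={used_vcpus}")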
[ 1176.783434] env[62813]: DEBUG nova.compute.claims [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1176.783629] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.783824] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.801125] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1176.856356] env[62813]: DEBUG oslo_vmware.rw_handles [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/308f29f4-bcb9-48bd-82b8-1fead83d8b6f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1176.915897] env[62813]: DEBUG oslo_vmware.rw_handles [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1176.916102] env[62813]: DEBUG oslo_vmware.rw_handles [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/308f29f4-bcb9-48bd-82b8-1fead83d8b6f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1177.213221] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec58f242-b783-48fd-a69b-a6ec2bce1335 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.221452] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce969c2-1b6d-4abe-84ca-87f0f5959780 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.284112] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44f7ecb-f97c-42d1-b896-f4321b673ad9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.299363] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b98158c-bde7-4a92-acd6-a15b0263fdba {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.319212] env[62813]: DEBUG nova.compute.provider_tree [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.329499] env[62813]: DEBUG nova.scheduler.client.report [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1177.348720] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.565s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.349323] env[62813]: ERROR nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1177.349323] env[62813]: Faults: ['InvalidArgument'] [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Traceback (most recent call last): [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File 
"/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] self.driver.spawn(context, instance, image_meta, [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] self._fetch_image_if_missing(context, vi) [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] image_cache(vi, tmp_image_ds_loc) [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] vm_util.copy_virtual_disk( [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] session._wait_for_task(vmdk_copy_task) [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] return self.wait_for_task(task_ref) [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] return evt.wait() [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] result = hub.switch() [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] return self.greenlet.switch() [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] self.f(*self.args, **self.kw) [ 1177.349323] 
env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] raise exceptions.translate_fault(task_info.error) [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Faults: ['InvalidArgument'] [ 1177.349323] env[62813]: ERROR nova.compute.manager [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] [ 1177.351619] env[62813]: DEBUG nova.compute.utils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1177.351947] env[62813]: DEBUG nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Build of instance 76b0e03d-9636-4328-bfd5-17c434cfae72 was re-scheduled: A specified parameter was not correct: fileType [ 1177.351947] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1177.352422] env[62813]: DEBUG nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1177.352695] env[62813]: DEBUG nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1177.352820] env[62813]: DEBUG nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1177.352923] env[62813]: DEBUG nova.network.neutron [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1177.734048] env[62813]: DEBUG nova.network.neutron [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.748500] env[62813]: INFO nova.compute.manager [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Took 0.39 seconds to deallocate network for instance. [ 1177.860922] env[62813]: INFO nova.scheduler.client.report [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Deleted allocations for instance 76b0e03d-9636-4328-bfd5-17c434cfae72 [ 1177.879429] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fbeea5d4-f922-41cd-9baf-605a0f461936 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Lock "76b0e03d-9636-4328-bfd5-17c434cfae72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 572.297s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.880679] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d47da77-97ec-443b-a8c3-88ee26c955f9 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Lock "76b0e03d-9636-4328-bfd5-17c434cfae72" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 373.832s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.880903] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d47da77-97ec-443b-a8c3-88ee26c955f9 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Acquiring lock "76b0e03d-9636-4328-bfd5-17c434cfae72-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.881127] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d47da77-97ec-443b-a8c3-88ee26c955f9 tempest-ImagesOneServerNegativeTestJSON-1157034213 
tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Lock "76b0e03d-9636-4328-bfd5-17c434cfae72-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.881301] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d47da77-97ec-443b-a8c3-88ee26c955f9 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Lock "76b0e03d-9636-4328-bfd5-17c434cfae72-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.883925] env[62813]: INFO nova.compute.manager [None req-6d47da77-97ec-443b-a8c3-88ee26c955f9 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Terminating instance [ 1177.885908] env[62813]: DEBUG nova.compute.manager [None req-6d47da77-97ec-443b-a8c3-88ee26c955f9 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1177.886164] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6d47da77-97ec-443b-a8c3-88ee26c955f9 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1177.886443] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2aa777b8-cbbc-40b2-9286-7acab2891a60 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.895791] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ebb300d-a2df-40e2-bfe9-e9d1f6a838a9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.906801] env[62813]: DEBUG nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1177.929207] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-6d47da77-97ec-443b-a8c3-88ee26c955f9 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 76b0e03d-9636-4328-bfd5-17c434cfae72 could not be found. 
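Aside on the lock records above and below: each build or terminate runs inside a function guarded by a lock named after the instance UUID (plus a finer "<uuid>-events" lock for external-event bookkeeping), and oslo.concurrency logs the waited/held durations at DEBUG. A minimal sketch of that pattern using oslo_concurrency.lockutils directly; the function names and the teardown helper are illustrative placeholders, not Nova's actual implementation.

from oslo_concurrency import lockutils


def terminate_instance(instance_uuid):
    # Decorating an inner function with a lock named after the instance UUID
    # is what produces the "Acquiring lock ... / acquired ... waited Ns /
    # released ... held Ns" DEBUG lines seen throughout this log.
    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        _shutdown_on_hypervisor(instance_uuid)

    do_terminate_instance()


def _shutdown_on_hypervisor(instance_uuid):
    # Hypothetical stand-in for the real hypervisor teardown.
    print('destroying %s' % instance_uuid)


terminate_instance('76b0e03d-9636-4328-bfd5-17c434cfae72')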
[ 1177.929438] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6d47da77-97ec-443b-a8c3-88ee26c955f9 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1177.929622] env[62813]: INFO nova.compute.manager [None req-6d47da77-97ec-443b-a8c3-88ee26c955f9 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1177.929875] env[62813]: DEBUG oslo.service.loopingcall [None req-6d47da77-97ec-443b-a8c3-88ee26c955f9 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1177.930193] env[62813]: DEBUG nova.compute.manager [-] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1177.930302] env[62813]: DEBUG nova.network.neutron [-] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1177.963237] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.963517] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.965135] env[62813]: INFO nova.compute.claims [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1177.968838] env[62813]: DEBUG nova.network.neutron [-] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.976242] env[62813]: INFO nova.compute.manager [-] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] Took 0.05 seconds to deallocate network for instance. 
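Aside on the "_deallocate_network_with_retries" record just above: the Neutron deallocation is wrapped in an oslo.service looping-call retry so a transient failure does not abort instance teardown. A rough sketch of that shape, assuming loopingcall.RetryDecorator; the retry counts, intervals and exception type here are illustrative, not Nova's exact values.

from oslo_service import loopingcall


class TransientNeutronError(Exception):
    """Illustrative stand-in for the connection errors actually retried on."""


def try_deallocate_network(context, instance):
    # The decorator re-invokes the function whenever it raises one of the
    # listed exceptions, sleeping between attempts up to max_sleep_time.
    @loopingcall.RetryDecorator(max_retry_count=3,
                                inter_retry_interval=2,
                                max_sleep_time=12,
                                exceptions=(TransientNeutronError,))
    def _deallocate_network_with_retries():
        return deallocate_for_instance(context, instance)

    return _deallocate_network_with_retries()


def deallocate_for_instance(context, instance):
    # Hypothetical placeholder for the Neutron port cleanup call.
    print('deallocating ports for %s' % instance)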
[ 1178.107346] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6d47da77-97ec-443b-a8c3-88ee26c955f9 tempest-ImagesOneServerNegativeTestJSON-1157034213 tempest-ImagesOneServerNegativeTestJSON-1157034213-project-member] Lock "76b0e03d-9636-4328-bfd5-17c434cfae72" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.227s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.108789] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "76b0e03d-9636-4328-bfd5-17c434cfae72" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 29.413s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.109401] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 76b0e03d-9636-4328-bfd5-17c434cfae72] During sync_power_state the instance has a pending task (deleting). Skip. [ 1178.109401] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "76b0e03d-9636-4328-bfd5-17c434cfae72" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.380505] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413df0b4-c28a-4e46-b7f9-65ae18fcef55 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.388897] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c4cb73-1c7a-4199-b368-9745d867f6cf {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.421569] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885e4b5a-6836-4b1a-90e5-f2a1da92401f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.430016] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d722c9-3ea2-477c-af53-e396c36d1117 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.444848] env[62813]: DEBUG nova.compute.provider_tree [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1178.458818] env[62813]: DEBUG nova.scheduler.client.report [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1178.476685] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.513s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.477256] env[62813]: DEBUG nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1178.517776] env[62813]: DEBUG nova.compute.utils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1178.518997] env[62813]: DEBUG nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1178.519303] env[62813]: DEBUG nova.network.neutron [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1178.530386] env[62813]: DEBUG nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1178.587009] env[62813]: DEBUG nova.policy [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65799c813c81459286b669666c703d8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0921c465ef944f1a50af55040cf7621', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1178.602571] env[62813]: DEBUG nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1178.629299] env[62813]: DEBUG nova.virt.hardware [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1178.629501] env[62813]: DEBUG nova.virt.hardware [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1178.630349] env[62813]: DEBUG nova.virt.hardware [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1178.630349] env[62813]: DEBUG nova.virt.hardware [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1178.630349] env[62813]: DEBUG nova.virt.hardware [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1178.630349] env[62813]: DEBUG nova.virt.hardware [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1178.630544] env[62813]: DEBUG nova.virt.hardware [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1178.630544] env[62813]: DEBUG nova.virt.hardware [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1178.630692] 
env[62813]: DEBUG nova.virt.hardware [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1178.630859] env[62813]: DEBUG nova.virt.hardware [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1178.631044] env[62813]: DEBUG nova.virt.hardware [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1178.631925] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982aad48-2ce1-48b8-b9a4-bf2b47a33b02 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.641126] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a649ab-da6c-40fb-a87e-4d4bc3ab952f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.963042] env[62813]: DEBUG nova.network.neutron [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Successfully created port: 6c3faba1-d6ae-4109-b137-f129167b43d2 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1179.748735] env[62813]: DEBUG nova.network.neutron [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Successfully updated port: 6c3faba1-d6ae-4109-b137-f129167b43d2 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1179.761916] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "refresh_cache-e6442505-b5d0-4736-a24a-41fccda6da6f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.763279] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquired lock "refresh_cache-e6442505-b5d0-4736-a24a-41fccda6da6f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.763279] env[62813]: DEBUG nova.network.neutron [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1179.837795] env[62813]: DEBUG 
nova.network.neutron [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1179.872739] env[62813]: DEBUG nova.compute.manager [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Received event network-vif-plugged-6c3faba1-d6ae-4109-b137-f129167b43d2 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1179.872973] env[62813]: DEBUG oslo_concurrency.lockutils [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] Acquiring lock "e6442505-b5d0-4736-a24a-41fccda6da6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.873224] env[62813]: DEBUG oslo_concurrency.lockutils [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] Lock "e6442505-b5d0-4736-a24a-41fccda6da6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.873411] env[62813]: DEBUG oslo_concurrency.lockutils [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] Lock "e6442505-b5d0-4736-a24a-41fccda6da6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.873585] env[62813]: DEBUG nova.compute.manager [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] No waiting events found dispatching network-vif-plugged-6c3faba1-d6ae-4109-b137-f129167b43d2 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1179.873750] env[62813]: WARNING nova.compute.manager [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Received unexpected event network-vif-plugged-6c3faba1-d6ae-4109-b137-f129167b43d2 for instance with vm_state building and task_state spawning. [ 1179.873914] env[62813]: DEBUG nova.compute.manager [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Received event network-changed-6c3faba1-d6ae-4109-b137-f129167b43d2 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1179.874089] env[62813]: DEBUG nova.compute.manager [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Refreshing instance network info cache due to event network-changed-6c3faba1-d6ae-4109-b137-f129167b43d2. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1179.874370] env[62813]: DEBUG oslo_concurrency.lockutils [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] Acquiring lock "refresh_cache-e6442505-b5d0-4736-a24a-41fccda6da6f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1180.119954] env[62813]: DEBUG nova.network.neutron [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Updating instance_info_cache with network_info: [{"id": "6c3faba1-d6ae-4109-b137-f129167b43d2", "address": "fa:16:3e:b3:87:be", "network": {"id": "8b829171-2963-4efb-b74a-73be4c3522fe", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1850443047-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0921c465ef944f1a50af55040cf7621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c3faba1-d6", "ovs_interfaceid": "6c3faba1-d6ae-4109-b137-f129167b43d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.137447] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Releasing lock "refresh_cache-e6442505-b5d0-4736-a24a-41fccda6da6f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1180.137909] env[62813]: DEBUG nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Instance network_info: |[{"id": "6c3faba1-d6ae-4109-b137-f129167b43d2", "address": "fa:16:3e:b3:87:be", "network": {"id": "8b829171-2963-4efb-b74a-73be4c3522fe", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1850443047-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0921c465ef944f1a50af55040cf7621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", 
"segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c3faba1-d6", "ovs_interfaceid": "6c3faba1-d6ae-4109-b137-f129167b43d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1180.138405] env[62813]: DEBUG oslo_concurrency.lockutils [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] Acquired lock "refresh_cache-e6442505-b5d0-4736-a24a-41fccda6da6f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.138665] env[62813]: DEBUG nova.network.neutron [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Refreshing network info cache for port 6c3faba1-d6ae-4109-b137-f129167b43d2 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1180.141055] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:87:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c3faba1-d6ae-4109-b137-f129167b43d2', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1180.148824] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Creating folder: Project (c0921c465ef944f1a50af55040cf7621). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1180.150740] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f17a8438-c288-4a09-acb7-fb0f68cf37e0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.165746] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Created folder: Project (c0921c465ef944f1a50af55040cf7621) in parent group-v840812. [ 1180.165955] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Creating folder: Instances. Parent ref: group-v840880. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1180.166220] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b1b577e-feec-4015-8c86-4cabeff9f0bf {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.176989] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Created folder: Instances in parent group-v840880. 
[ 1180.177282] env[62813]: DEBUG oslo.service.loopingcall [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1180.177550] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1180.177777] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49f64494-0a78-41cc-b76c-fd033688fbec {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.199033] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1180.199033] env[62813]: value = "task-4267677" [ 1180.199033] env[62813]: _type = "Task" [ 1180.199033] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.210337] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267677, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.515835] env[62813]: DEBUG nova.network.neutron [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Updated VIF entry in instance network info cache for port 6c3faba1-d6ae-4109-b137-f129167b43d2. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1180.516401] env[62813]: DEBUG nova.network.neutron [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Updating instance_info_cache with network_info: [{"id": "6c3faba1-d6ae-4109-b137-f129167b43d2", "address": "fa:16:3e:b3:87:be", "network": {"id": "8b829171-2963-4efb-b74a-73be4c3522fe", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1850443047-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0921c465ef944f1a50af55040cf7621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c3faba1-d6", "ovs_interfaceid": "6c3faba1-d6ae-4109-b137-f129167b43d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.529601] env[62813]: DEBUG oslo_concurrency.lockutils [req-0adfddaa-177c-4108-a93f-87e32702bd2c req-b0ec3a80-3bd5-455c-889b-74dd7eac4f7a service nova] Releasing lock "refresh_cache-e6442505-b5d0-4736-a24a-41fccda6da6f" {{(pid=62813) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1180.714256] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267677, 'name': CreateVM_Task, 'duration_secs': 0.344633} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.714256] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1180.714256] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1180.714256] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.714906] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1180.715432] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31dc0dcd-e571-4755-ad51-f648f86c0972 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.722359] env[62813]: DEBUG oslo_vmware.api [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Waiting for the task: (returnval){ [ 1180.722359] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5235c81f-18b3-17a3-4360-800d335ac007" [ 1180.722359] env[62813]: _type = "Task" [ 1180.722359] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.735040] env[62813]: DEBUG oslo_vmware.api [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5235c81f-18b3-17a3-4360-800d335ac007, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.235749] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1181.236101] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1181.236226] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1189.331662] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1540f5fe-f660-453b-9056-43fe1cd6d0b6 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "e6442505-b5d0-4736-a24a-41fccda6da6f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.357432] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquiring lock "d40089e3-67b3-452e-a0d1-18d5def1ff34" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.357432] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "d40089e3-67b3-452e-a0d1-18d5def1ff34" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.655767] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69dbdc74-8958-4c76-b8c0-e9c1cf7a655a tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquiring lock "abc5e07e-8408-4938-9831-42d828ef877d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.655767] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69dbdc74-8958-4c76-b8c0-e9c1cf7a655a tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "abc5e07e-8408-4938-9831-42d828ef877d" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1191.053647] env[62813]: DEBUG oslo_concurrency.lockutils [None req-271da832-8aa3-4fe8-94b7-80647ca10451 tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquiring lock "244be995-46ef-43fe-bec7-bdf9da081985" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1191.053873] env[62813]: DEBUG oslo_concurrency.lockutils [None req-271da832-8aa3-4fe8-94b7-80647ca10451 tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "244be995-46ef-43fe-bec7-bdf9da081985" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.163381] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.163792] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1196.164712] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1198.164761] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.163694] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1200.159772] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1200.163555] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1200.163718] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1200.163825] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None 
None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1200.186734] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1200.187210] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1200.187391] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1200.187527] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1200.187653] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1200.187775] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1200.187899] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1200.188087] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1200.188149] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1200.188263] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1200.188379] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1201.164829] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.164829] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.176081] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.176081] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.176081] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.176081] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1201.177384] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2187b198-b7c6-4329-972f-db0a3837fab0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.187367] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30a12ae-6068-418d-8694-974b137c51ed {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.203112] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60ce3b1-08b2-4e12-8275-62fc4b526390 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.210950] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ad3409-ead7-4165-81ae-9cd1f6ec90fe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.242248] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180718MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1201.242493] env[62813]: DEBUG 
oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.242656] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.316327] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 356088a2-b55e-4ff1-9422-a53ab6830fc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1201.316485] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 489b821e-f7d0-446f-8197-550c808e5a99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1201.316655] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b946bdda-a8a4-4a82-b2f7-99637fcae21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1201.316789] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1201.316969] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9b7bace-d76a-4dd8-8283-b56fd86a77a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1201.317046] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1201.317153] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1201.317268] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance eec98a4d-34f4-4313-8f9c-2fe9f483959c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1201.317382] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d79298e6-bb55-4ba6-9a68-e460c8953c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1201.317494] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e6442505-b5d0-4736-a24a-41fccda6da6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1201.329592] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b042b18a-4efb-431a-afa6-f4dc8b4c1bd7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.340737] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4de44f46-3872-46f4-afb4-308cc8b18c89 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.351945] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5655255a-1d03-4854-b8ad-d77643f9b9c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.362056] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 4e00cefc-ffe9-41e4-9520-281d937e32ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.372906] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance ab1749b4-a87b-4bd2-bdad-b2db446f44d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.384960] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 81ac869e-c8ab-4f75-bfb7-bbb3296c24dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.399688] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 6943412a-83f2-437b-80af-4a2de7ed5029 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.411226] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 3a85472c-25b9-4fb7-a438-84fa699d7f0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.424287] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance bae5a146-4946-4e03-a6f5-062e79a61def has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.435185] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b322a084-d312-45b9-90d3-11c2180c71f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.446069] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1f26c898-895c-4256-a0c4-a1596279acc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.459558] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.468882] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.486598] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d40089e3-67b3-452e-a0d1-18d5def1ff34 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.500691] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance abc5e07e-8408-4938-9831-42d828ef877d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.519827] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 244be995-46ef-43fe-bec7-bdf9da081985 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1201.519921] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1201.520147] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1201.930323] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0f1a3e-3343-4812-b272-ed085da0fccc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.938335] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27dc51cc-d901-4ec6-8c40-f37012d15ab1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.970218] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9097797e-88ad-43d2-83c5-25941e2bc295 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.978822] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531a8593-c694-4e3c-b285-b03c4c752290 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.993856] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1202.004895] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1202.019585] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1202.019805] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.777s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.020352] env[62813]: DEBUG oslo_service.periodic_task [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1218.381293] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c224f39d-c12d-41f3-9bfc-665fa481cad1 tempest-ServerRescueTestJSON-1480939942 tempest-ServerRescueTestJSON-1480939942-project-member] Acquiring lock "125e3e78-bb24-47c5-9096-00c6667c925a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.381663] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c224f39d-c12d-41f3-9bfc-665fa481cad1 tempest-ServerRescueTestJSON-1480939942 tempest-ServerRescueTestJSON-1480939942-project-member] Lock "125e3e78-bb24-47c5-9096-00c6667c925a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.234796] env[62813]: WARNING oslo_vmware.rw_handles [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1224.234796] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1224.234796] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1224.234796] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1224.234796] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1224.234796] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1224.234796] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1224.234796] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1224.234796] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1224.234796] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1224.234796] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1224.234796] env[62813]: ERROR oslo_vmware.rw_handles [ 1224.235532] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/308f29f4-bcb9-48bd-82b8-1fead83d8b6f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1224.237693] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Caching image {{(pid=62813) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1224.237903] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Copying Virtual Disk [datastore2] vmware_temp/308f29f4-bcb9-48bd-82b8-1fead83d8b6f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/308f29f4-bcb9-48bd-82b8-1fead83d8b6f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1224.238207] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-192be186-ead2-4bc5-bb7e-27ced96614ac {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.248725] env[62813]: DEBUG oslo_vmware.api [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Waiting for the task: (returnval){ [ 1224.248725] env[62813]: value = "task-4267678" [ 1224.248725] env[62813]: _type = "Task" [ 1224.248725] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.258050] env[62813]: DEBUG oslo_vmware.api [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Task: {'id': task-4267678, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.759778] env[62813]: DEBUG oslo_vmware.exceptions [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1224.760030] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1224.760647] env[62813]: ERROR nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1224.760647] env[62813]: Faults: ['InvalidArgument'] [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Traceback (most recent call last): [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] yield resources [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] self.driver.spawn(context, instance, image_meta, [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] self._fetch_image_if_missing(context, vi) [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] image_cache(vi, tmp_image_ds_loc) [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] vm_util.copy_virtual_disk( [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] session._wait_for_task(vmdk_copy_task) [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] return self.wait_for_task(task_ref) [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] return evt.wait() [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] result = hub.switch() [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] return self.greenlet.switch() [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] self.f(*self.args, **self.kw) [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] raise exceptions.translate_fault(task_info.error) [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Faults: ['InvalidArgument'] [ 1224.760647] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] [ 1224.761762] env[62813]: INFO nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Terminating instance [ 1224.762660] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.762948] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1224.763618] env[62813]: DEBUG nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 
tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1224.763807] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1224.764044] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86446d6d-1727-43e1-96a0-deefdf3691c7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.766474] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fabc38b-8b01-4c37-a64c-e8a11af3caca {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.774059] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1224.774444] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e75379b5-5116-4d8e-918c-2ae57b8cea3b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.776720] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1224.776889] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1224.777870] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c55febb-b7c6-4fd8-9359-3d5454e826b7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.782868] env[62813]: DEBUG oslo_vmware.api [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Waiting for the task: (returnval){ [ 1224.782868] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]520cb878-dd9e-15ec-aad5-ca0418299b22" [ 1224.782868] env[62813]: _type = "Task" [ 1224.782868] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.792050] env[62813]: DEBUG oslo_vmware.api [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]520cb878-dd9e-15ec-aad5-ca0418299b22, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.861163] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1224.861358] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1224.861666] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Deleting the datastore file [datastore2] 356088a2-b55e-4ff1-9422-a53ab6830fc9 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1224.861817] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2e54de8-0cd5-42e1-a7ef-2463910a8c37 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.870449] env[62813]: DEBUG oslo_vmware.api [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Waiting for the task: (returnval){ [ 1224.870449] env[62813]: value = "task-4267680" [ 1224.870449] env[62813]: _type = "Task" [ 1224.870449] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.879053] env[62813]: DEBUG oslo_vmware.api [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Task: {'id': task-4267680, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.293429] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1225.293746] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Creating directory with path [datastore2] vmware_temp/0198771c-9111-4a9f-9d7f-52ef321b0e3c/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1225.294196] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-adbdc04d-3c12-4d2a-8966-0ce5bdd156a6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.306656] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Created directory with path [datastore2] vmware_temp/0198771c-9111-4a9f-9d7f-52ef321b0e3c/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1225.306869] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Fetch image to [datastore2] vmware_temp/0198771c-9111-4a9f-9d7f-52ef321b0e3c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1225.307191] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/0198771c-9111-4a9f-9d7f-52ef321b0e3c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1225.307869] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c1074e-026f-4b05-b37c-d54e6f099412 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.315558] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689f1652-a37a-464f-91b1-1baa8836b3bf {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.326421] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137dd0d0-9f5d-4fb8-b48f-3f0e3fd33e80 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.359714] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b992e72-01e5-4c55-a12d-b3f05fb14d53 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.366802] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fc54a66d-f8a1-4992-af7d-180ce4a965c9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.380719] env[62813]: DEBUG oslo_vmware.api [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Task: {'id': task-4267680, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080218} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.381047] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1225.381245] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1225.381428] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1225.381645] env[62813]: INFO nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Took 0.62 seconds to destroy the instance on the hypervisor. 
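The CopyVirtualDisk_Task and DeleteDatastoreFile_Task records above all follow the same invoke-then-poll shape: oslo.vmware returns a task reference and wait_for_task blocks, polling until vCenter reports completion or a fault. A minimal sketch of that pattern, assuming an already-authenticated VMwareAPISession and placeholder datastore paths rather than the ones in this trace, looks like this:

# Sketch of the task-wait pattern behind the "Waiting for the task ... to complete"
# and "_poll_task ... progress is 0%" records above, written against oslo.vmware's
# public session API rather than Nova's internal wrappers. `session` is assumed to
# be an authenticated oslo_vmware.api.VMwareAPISession.
def copy_virtual_disk_and_wait(session, dc_ref, source_path, dest_path):
    vim = session.vim
    # Start the asynchronous vSphere task; vCenter hands back a task reference
    # (e.g. "task-4267678" / "task-4267680" in the records above).
    task = session.invoke_api(
        vim, "CopyVirtualDisk_Task",
        vim.service_content.virtualDiskManager,
        sourceName=source_path, sourceDatacenter=dc_ref,
        destName=dest_path, destDatacenter=dc_ref)
    # wait_for_task polls the task periodically and raises a translated fault
    # (such as the VimFaultException with Faults: ['InvalidArgument'] seen
    # later in this trace) if vCenter reports an error.
    return session.wait_for_task(task)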
[ 1225.384985] env[62813]: DEBUG nova.compute.claims [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1225.385216] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.385430] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.390057] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1225.621551] env[62813]: DEBUG oslo_vmware.rw_handles [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0198771c-9111-4a9f-9d7f-52ef321b0e3c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1225.682630] env[62813]: DEBUG oslo_vmware.rw_handles [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1225.682880] env[62813]: DEBUG oslo_vmware.rw_handles [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0198771c-9111-4a9f-9d7f-52ef321b0e3c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1225.849568] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ec5838-b847-4cd1-a36f-700ba51b33cb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.857978] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b986f704-07c1-407f-84cd-8c384bc414de {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.890638] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db418540-70f7-402a-b025-15f534fd2c57 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.898689] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f4cfe7-d919-4817-843e-b109e57cc5f5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.913102] env[62813]: DEBUG nova.compute.provider_tree [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1225.922578] env[62813]: DEBUG nova.scheduler.client.report [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1225.937191] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.552s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.937747] env[62813]: ERROR nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1225.937747] env[62813]: Faults: ['InvalidArgument'] [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Traceback (most recent call last): [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] self.driver.spawn(context, instance, image_meta, [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] self._fetch_image_if_missing(context, vi) [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] image_cache(vi, tmp_image_ds_loc) [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] vm_util.copy_virtual_disk( [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] session._wait_for_task(vmdk_copy_task) [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] return self.wait_for_task(task_ref) [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] return evt.wait() [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] result = hub.switch() [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] return self.greenlet.switch() [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] self.f(*self.args, **self.kw) [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 
356088a2-b55e-4ff1-9422-a53ab6830fc9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] raise exceptions.translate_fault(task_info.error) [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Faults: ['InvalidArgument'] [ 1225.937747] env[62813]: ERROR nova.compute.manager [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] [ 1225.938765] env[62813]: DEBUG nova.compute.utils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1225.939924] env[62813]: DEBUG nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Build of instance 356088a2-b55e-4ff1-9422-a53ab6830fc9 was re-scheduled: A specified parameter was not correct: fileType [ 1225.939924] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1225.940321] env[62813]: DEBUG nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1225.940499] env[62813]: DEBUG nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1225.940681] env[62813]: DEBUG nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1225.940841] env[62813]: DEBUG nova.network.neutron [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1226.316029] env[62813]: DEBUG nova.network.neutron [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.326910] env[62813]: INFO nova.compute.manager [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Took 0.39 seconds to deallocate network for instance. [ 1226.451709] env[62813]: INFO nova.scheduler.client.report [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Deleted allocations for instance 356088a2-b55e-4ff1-9422-a53ab6830fc9 [ 1226.476173] env[62813]: DEBUG oslo_concurrency.lockutils [None req-9a4f1887-0849-4267-a524-2aa209597603 tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Lock "356088a2-b55e-4ff1-9422-a53ab6830fc9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 613.141s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.477824] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0ef78100-3551-4259-860b-40e05765138b tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Lock "356088a2-b55e-4ff1-9422-a53ab6830fc9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 414.915s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.477824] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0ef78100-3551-4259-860b-40e05765138b tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Acquiring lock "356088a2-b55e-4ff1-9422-a53ab6830fc9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.477824] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0ef78100-3551-4259-860b-40e05765138b tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Lock "356088a2-b55e-4ff1-9422-a53ab6830fc9-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.478293] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0ef78100-3551-4259-860b-40e05765138b tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Lock "356088a2-b55e-4ff1-9422-a53ab6830fc9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.481704] env[62813]: INFO nova.compute.manager [None req-0ef78100-3551-4259-860b-40e05765138b tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Terminating instance [ 1226.483537] env[62813]: DEBUG nova.compute.manager [None req-0ef78100-3551-4259-860b-40e05765138b tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1226.483731] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0ef78100-3551-4259-860b-40e05765138b tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1226.484010] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4457366-f0ae-4d10-b346-54823e4d9fa6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.489182] env[62813]: DEBUG nova.compute.manager [None req-013c758e-ef81-4bd1-b37a-8073f35f6878 tempest-ServerRescueNegativeTestJSON-351686158 tempest-ServerRescueNegativeTestJSON-351686158-project-member] [instance: f0d3a02d-ddb9-4338-989e-e256fb50ede5] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1226.495637] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcaa49c4-d386-40ce-8320-185d500f6635 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.527109] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-0ef78100-3551-4259-860b-40e05765138b tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 356088a2-b55e-4ff1-9422-a53ab6830fc9 could not be found. 
[ 1226.527351] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0ef78100-3551-4259-860b-40e05765138b tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1226.527696] env[62813]: INFO nova.compute.manager [None req-0ef78100-3551-4259-860b-40e05765138b tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1226.527804] env[62813]: DEBUG oslo.service.loopingcall [None req-0ef78100-3551-4259-860b-40e05765138b tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1226.528025] env[62813]: DEBUG nova.compute.manager [-] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1226.528133] env[62813]: DEBUG nova.network.neutron [-] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1226.544542] env[62813]: DEBUG nova.compute.manager [None req-013c758e-ef81-4bd1-b37a-8073f35f6878 tempest-ServerRescueNegativeTestJSON-351686158 tempest-ServerRescueNegativeTestJSON-351686158-project-member] [instance: f0d3a02d-ddb9-4338-989e-e256fb50ede5] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1226.567493] env[62813]: DEBUG nova.network.neutron [-] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.573140] env[62813]: DEBUG oslo_concurrency.lockutils [None req-013c758e-ef81-4bd1-b37a-8073f35f6878 tempest-ServerRescueNegativeTestJSON-351686158 tempest-ServerRescueNegativeTestJSON-351686158-project-member] Lock "f0d3a02d-ddb9-4338-989e-e256fb50ede5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.003s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.576310] env[62813]: INFO nova.compute.manager [-] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] Took 0.05 seconds to deallocate network for instance. [ 1226.585120] env[62813]: DEBUG nova.compute.manager [None req-743bab17-50f4-4541-aa15-e4dfa42d6a92 tempest-ServerRescueNegativeTestJSON-351686158 tempest-ServerRescueNegativeTestJSON-351686158-project-member] [instance: e261660b-b9ee-487c-b044-ce1325c8e2ac] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1226.612543] env[62813]: DEBUG nova.compute.manager [None req-743bab17-50f4-4541-aa15-e4dfa42d6a92 tempest-ServerRescueNegativeTestJSON-351686158 tempest-ServerRescueNegativeTestJSON-351686158-project-member] [instance: e261660b-b9ee-487c-b044-ce1325c8e2ac] Instance disappeared before build. 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1226.639337] env[62813]: DEBUG oslo_concurrency.lockutils [None req-743bab17-50f4-4541-aa15-e4dfa42d6a92 tempest-ServerRescueNegativeTestJSON-351686158 tempest-ServerRescueNegativeTestJSON-351686158-project-member] Lock "e261660b-b9ee-487c-b044-ce1325c8e2ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.019s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.649887] env[62813]: DEBUG nova.compute.manager [None req-741b79bd-e8f1-4427-afb5-ff8252feca99 tempest-ServerTagsTestJSON-1563933505 tempest-ServerTagsTestJSON-1563933505-project-member] [instance: b042b18a-4efb-431a-afa6-f4dc8b4c1bd7] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1226.685698] env[62813]: DEBUG nova.compute.manager [None req-741b79bd-e8f1-4427-afb5-ff8252feca99 tempest-ServerTagsTestJSON-1563933505 tempest-ServerTagsTestJSON-1563933505-project-member] [instance: b042b18a-4efb-431a-afa6-f4dc8b4c1bd7] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1226.700285] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0ef78100-3551-4259-860b-40e05765138b tempest-VolumesAssistedSnapshotsTest-595928432 tempest-VolumesAssistedSnapshotsTest-595928432-project-member] Lock "356088a2-b55e-4ff1-9422-a53ab6830fc9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.223s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.702560] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "356088a2-b55e-4ff1-9422-a53ab6830fc9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 78.006s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.702779] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 356088a2-b55e-4ff1-9422-a53ab6830fc9] During sync_power_state the instance has a pending task (deleting). Skip. [ 1226.702965] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "356088a2-b55e-4ff1-9422-a53ab6830fc9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.711616] env[62813]: DEBUG oslo_concurrency.lockutils [None req-741b79bd-e8f1-4427-afb5-ff8252feca99 tempest-ServerTagsTestJSON-1563933505 tempest-ServerTagsTestJSON-1563933505-project-member] Lock "b042b18a-4efb-431a-afa6-f4dc8b4c1bd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.432s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.722253] env[62813]: DEBUG nova.compute.manager [None req-f2a95070-73a4-40ef-bae4-114289d18933 tempest-ServerAddressesTestJSON-559922155 tempest-ServerAddressesTestJSON-559922155-project-member] [instance: 4de44f46-3872-46f4-afb4-308cc8b18c89] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1226.746325] env[62813]: DEBUG nova.compute.manager [None req-f2a95070-73a4-40ef-bae4-114289d18933 tempest-ServerAddressesTestJSON-559922155 tempest-ServerAddressesTestJSON-559922155-project-member] [instance: 4de44f46-3872-46f4-afb4-308cc8b18c89] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1226.769438] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f2a95070-73a4-40ef-bae4-114289d18933 tempest-ServerAddressesTestJSON-559922155 tempest-ServerAddressesTestJSON-559922155-project-member] Lock "4de44f46-3872-46f4-afb4-308cc8b18c89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.303s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.779290] env[62813]: DEBUG nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1226.840177] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.840874] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.842074] env[62813]: INFO nova.compute.claims [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1227.186320] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5d3558-82fa-472e-ab3f-cad170e8e441 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.195518] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b0f2d0-a1c1-40e2-aec2-abaf5ba58d6b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.226186] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2ee2c7-d12b-4490-834a-b64f4c442f6e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.235115] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4fc16a-a09c-421d-a28b-ddc79123c545 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.250347] env[62813]: DEBUG 
nova.compute.provider_tree [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1227.259351] env[62813]: DEBUG nova.scheduler.client.report [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1227.273776] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.433s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.274277] env[62813]: DEBUG nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1227.310591] env[62813]: DEBUG nova.compute.utils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1227.311807] env[62813]: DEBUG nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1227.311991] env[62813]: DEBUG nova.network.neutron [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1227.320689] env[62813]: DEBUG nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Start building block device mappings for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1227.368244] env[62813]: DEBUG nova.policy [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05e5639b4c294098ac97eae52872b91c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dab4ddba893f4b47886bb54e9083c414', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1227.390067] env[62813]: DEBUG nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1227.417693] env[62813]: DEBUG nova.virt.hardware [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1227.417994] env[62813]: DEBUG nova.virt.hardware [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1227.418185] env[62813]: DEBUG nova.virt.hardware [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1227.418403] env[62813]: DEBUG nova.virt.hardware [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1227.418556] env[62813]: DEBUG nova.virt.hardware [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1227.418709] env[62813]: DEBUG nova.virt.hardware [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 
tempest-ServersTestJSON-661015703-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1227.418924] env[62813]: DEBUG nova.virt.hardware [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1227.419113] env[62813]: DEBUG nova.virt.hardware [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1227.419291] env[62813]: DEBUG nova.virt.hardware [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1227.419469] env[62813]: DEBUG nova.virt.hardware [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1227.419647] env[62813]: DEBUG nova.virt.hardware [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1227.420592] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ce97f5-5db0-4a5b-b96b-b2715c229d4b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.429729] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c11423-be6f-40cd-b7b9-6340d15e98f9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.982897] env[62813]: DEBUG nova.network.neutron [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Successfully created port: 40ed8bf6-d2f1-4853-b016-1e3327151755 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1228.674502] env[62813]: DEBUG nova.compute.manager [req-273d015f-7fc0-4210-a87e-a81b3c92b352 req-7000eeaa-d6ce-4d79-b503-d8ea1145a7e7 service nova] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Received event network-vif-plugged-40ed8bf6-d2f1-4853-b016-1e3327151755 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1228.675911] env[62813]: DEBUG oslo_concurrency.lockutils [req-273d015f-7fc0-4210-a87e-a81b3c92b352 req-7000eeaa-d6ce-4d79-b503-d8ea1145a7e7 service nova] Acquiring lock "5655255a-1d03-4854-b8ad-d77643f9b9c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.676355] env[62813]: DEBUG oslo_concurrency.lockutils [req-273d015f-7fc0-4210-a87e-a81b3c92b352 req-7000eeaa-d6ce-4d79-b503-d8ea1145a7e7 service nova] Lock "5655255a-1d03-4854-b8ad-d77643f9b9c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.677621] env[62813]: DEBUG oslo_concurrency.lockutils [req-273d015f-7fc0-4210-a87e-a81b3c92b352 req-7000eeaa-d6ce-4d79-b503-d8ea1145a7e7 service nova] Lock "5655255a-1d03-4854-b8ad-d77643f9b9c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.677621] env[62813]: DEBUG nova.compute.manager [req-273d015f-7fc0-4210-a87e-a81b3c92b352 req-7000eeaa-d6ce-4d79-b503-d8ea1145a7e7 service nova] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] No waiting events found dispatching network-vif-plugged-40ed8bf6-d2f1-4853-b016-1e3327151755 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1228.677621] env[62813]: WARNING nova.compute.manager [req-273d015f-7fc0-4210-a87e-a81b3c92b352 req-7000eeaa-d6ce-4d79-b503-d8ea1145a7e7 service nova] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Received unexpected event network-vif-plugged-40ed8bf6-d2f1-4853-b016-1e3327151755 for instance with vm_state building and task_state spawning. [ 1228.768859] env[62813]: DEBUG nova.network.neutron [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Successfully updated port: 40ed8bf6-d2f1-4853-b016-1e3327151755 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1228.782681] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "refresh_cache-5655255a-1d03-4854-b8ad-d77643f9b9c6" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1228.782681] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired lock "refresh_cache-5655255a-1d03-4854-b8ad-d77643f9b9c6" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.782681] env[62813]: DEBUG nova.network.neutron [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1228.836887] env[62813]: DEBUG nova.network.neutron [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1229.062101] env[62813]: DEBUG nova.network.neutron [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Updating instance_info_cache with network_info: [{"id": "40ed8bf6-d2f1-4853-b016-1e3327151755", "address": "fa:16:3e:e4:06:bd", "network": {"id": "0d8c8a43-7fa2-405e-9c3d-8c25c7410832", "bridge": "br-int", "label": "tempest-ServersTestJSON-1384810997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab4ddba893f4b47886bb54e9083c414", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40ed8bf6-d2", "ovs_interfaceid": "40ed8bf6-d2f1-4853-b016-1e3327151755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.082535] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Releasing lock "refresh_cache-5655255a-1d03-4854-b8ad-d77643f9b9c6" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1229.082808] env[62813]: DEBUG nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Instance network_info: |[{"id": "40ed8bf6-d2f1-4853-b016-1e3327151755", "address": "fa:16:3e:e4:06:bd", "network": {"id": "0d8c8a43-7fa2-405e-9c3d-8c25c7410832", "bridge": "br-int", "label": "tempest-ServersTestJSON-1384810997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab4ddba893f4b47886bb54e9083c414", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40ed8bf6-d2", "ovs_interfaceid": "40ed8bf6-d2f1-4853-b016-1e3327151755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1229.083507] env[62813]: DEBUG 
nova.virt.vmwareapi.vmops [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:06:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad4fcde7-8926-402a-a9b7-4878d2bc1cf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40ed8bf6-d2f1-4853-b016-1e3327151755', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1229.091650] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Creating folder: Project (dab4ddba893f4b47886bb54e9083c414). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1229.092226] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5ba759c-11bf-47c0-ae52-c7034de9595a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.103989] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Created folder: Project (dab4ddba893f4b47886bb54e9083c414) in parent group-v840812. [ 1229.104240] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Creating folder: Instances. Parent ref: group-v840883. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1229.104494] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32f9dac6-7399-4b16-aa8b-1fe77ff3fe7d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.113459] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Created folder: Instances in parent group-v840883. [ 1229.113689] env[62813]: DEBUG oslo.service.loopingcall [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1229.113905] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1229.114142] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a91927c4-9d7d-4fbb-a98a-02b5b5a64ef6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.136355] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1229.136355] env[62813]: value = "task-4267683" [ 1229.136355] env[62813]: _type = "Task" [ 1229.136355] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.147192] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267683, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.646556] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267683, 'name': CreateVM_Task, 'duration_secs': 0.325699} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.646822] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1229.654537] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1229.654732] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.655269] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1229.655513] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-970587d4-ae51-4354-bff5-735e104499fd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.662480] env[62813]: DEBUG oslo_vmware.api [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for the task: (returnval){ [ 1229.662480] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5285430b-c242-7155-bbb4-070143403e0e" [ 1229.662480] env[62813]: _type = "Task" [ 1229.662480] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.671603] env[62813]: DEBUG oslo_vmware.api [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5285430b-c242-7155-bbb4-070143403e0e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.173317] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1230.173653] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1230.173963] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1230.727588] env[62813]: DEBUG nova.compute.manager [req-d6b2c8ff-e2b6-47ef-bb31-dd867083745b req-d566a852-97fa-4419-b80d-1d334f5a51e4 service nova] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Received event network-changed-40ed8bf6-d2f1-4853-b016-1e3327151755 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1230.727779] env[62813]: DEBUG nova.compute.manager [req-d6b2c8ff-e2b6-47ef-bb31-dd867083745b req-d566a852-97fa-4419-b80d-1d334f5a51e4 service nova] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Refreshing instance network info cache due to event network-changed-40ed8bf6-d2f1-4853-b016-1e3327151755. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1230.728025] env[62813]: DEBUG oslo_concurrency.lockutils [req-d6b2c8ff-e2b6-47ef-bb31-dd867083745b req-d566a852-97fa-4419-b80d-1d334f5a51e4 service nova] Acquiring lock "refresh_cache-5655255a-1d03-4854-b8ad-d77643f9b9c6" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1230.728275] env[62813]: DEBUG oslo_concurrency.lockutils [req-d6b2c8ff-e2b6-47ef-bb31-dd867083745b req-d566a852-97fa-4419-b80d-1d334f5a51e4 service nova] Acquired lock "refresh_cache-5655255a-1d03-4854-b8ad-d77643f9b9c6" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.728669] env[62813]: DEBUG nova.network.neutron [req-d6b2c8ff-e2b6-47ef-bb31-dd867083745b req-d566a852-97fa-4419-b80d-1d334f5a51e4 service nova] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Refreshing network info cache for port 40ed8bf6-d2f1-4853-b016-1e3327151755 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1230.769800] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ac62f249-18c2-4e7c-8415-3bd58f925002 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquiring lock "9fdaddae-c9a4-4867-9f80-91e70efd2b51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.770047] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ac62f249-18c2-4e7c-8415-3bd58f925002 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "9fdaddae-c9a4-4867-9f80-91e70efd2b51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.024832] env[62813]: DEBUG nova.network.neutron [req-d6b2c8ff-e2b6-47ef-bb31-dd867083745b req-d566a852-97fa-4419-b80d-1d334f5a51e4 service nova] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Updated VIF entry in instance network info cache for port 40ed8bf6-d2f1-4853-b016-1e3327151755. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1231.025294] env[62813]: DEBUG nova.network.neutron [req-d6b2c8ff-e2b6-47ef-bb31-dd867083745b req-d566a852-97fa-4419-b80d-1d334f5a51e4 service nova] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Updating instance_info_cache with network_info: [{"id": "40ed8bf6-d2f1-4853-b016-1e3327151755", "address": "fa:16:3e:e4:06:bd", "network": {"id": "0d8c8a43-7fa2-405e-9c3d-8c25c7410832", "bridge": "br-int", "label": "tempest-ServersTestJSON-1384810997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab4ddba893f4b47886bb54e9083c414", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40ed8bf6-d2", "ovs_interfaceid": "40ed8bf6-d2f1-4853-b016-1e3327151755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.035862] env[62813]: DEBUG oslo_concurrency.lockutils [req-d6b2c8ff-e2b6-47ef-bb31-dd867083745b req-d566a852-97fa-4419-b80d-1d334f5a51e4 service nova] Releasing lock "refresh_cache-5655255a-1d03-4854-b8ad-d77643f9b9c6" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1231.544911] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e4ca4bb4-2199-4ed2-8535-893d4a3cbfbe tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "5655255a-1d03-4854-b8ad-d77643f9b9c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1235.849359] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1235.849707] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1244.811598] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b2f93154-c0af-48b7-89ca-1da59a7f9f83 tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] Acquiring lock 
"b06482cf-8823-41af-a940-2dfb5d72e70e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.811892] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b2f93154-c0af-48b7-89ca-1da59a7f9f83 tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] Lock "b06482cf-8823-41af-a940-2dfb5d72e70e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.208661] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4faa9eba-b19b-4e54-9869-87a2b15bb28f tempest-ServersListShow296Test-1233843078 tempest-ServersListShow296Test-1233843078-project-member] Acquiring lock "8b7930d8-8117-4b3d-8218-39bff602a4b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1245.209059] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4faa9eba-b19b-4e54-9869-87a2b15bb28f tempest-ServersListShow296Test-1233843078 tempest-ServersListShow296Test-1233843078-project-member] Lock "8b7930d8-8117-4b3d-8218-39bff602a4b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.439429] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3877d6ba-930c-405b-b741-88971af2c20c tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Acquiring lock "467d9133-7fe6-44e7-9ffb-f0edacf3be81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1245.439429] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3877d6ba-930c-405b-b741-88971af2c20c tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Lock "467d9133-7fe6-44e7-9ffb-f0edacf3be81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.163844] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.163844] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1257.164810] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1259.164360] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.159772] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.163425] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1262.164099] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1262.164826] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1262.165148] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1262.188114] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1262.188295] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1262.188432] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1262.188564] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1262.188689] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1262.188814] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1262.188935] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1262.189067] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1262.189189] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1262.189313] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1262.189435] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1262.189943] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1262.190134] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.164962] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.176192] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1263.176447] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1263.176633] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1263.176792] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1263.177947] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc86bc5-c8c4-4a09-8ec2-3cfd61f760f6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.186919] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30890a4-b5d4-4a08-89da-4d5e04e3c67e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.200716] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb762c7f-3127-45b6-93b4-679438f9ebdd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.207492] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe2dad0-aa32-41d9-9fac-cdbc4df2b33a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.237231] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180777MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1263.237431] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1263.237575] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1263.316020] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 489b821e-f7d0-446f-8197-550c808e5a99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1263.316267] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b946bdda-a8a4-4a82-b2f7-99637fcae21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1263.316399] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1263.316532] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9b7bace-d76a-4dd8-8283-b56fd86a77a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1263.316659] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1263.316796] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1263.316920] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance eec98a4d-34f4-4313-8f9c-2fe9f483959c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1263.317064] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d79298e6-bb55-4ba6-9a68-e460c8953c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1263.317189] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e6442505-b5d0-4736-a24a-41fccda6da6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1263.317325] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5655255a-1d03-4854-b8ad-d77643f9b9c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1263.334149] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b322a084-d312-45b9-90d3-11c2180c71f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1263.345121] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1f26c898-895c-4256-a0c4-a1596279acc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1263.356912] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1263.368325] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1263.379320] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d40089e3-67b3-452e-a0d1-18d5def1ff34 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1263.390872] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance abc5e07e-8408-4938-9831-42d828ef877d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1263.401951] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 244be995-46ef-43fe-bec7-bdf9da081985 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1263.413859] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 125e3e78-bb24-47c5-9096-00c6667c925a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1263.424893] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 9fdaddae-c9a4-4867-9f80-91e70efd2b51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1263.436033] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1263.446897] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b06482cf-8823-41af-a940-2dfb5d72e70e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1263.458210] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 8b7930d8-8117-4b3d-8218-39bff602a4b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1263.469530] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 467d9133-7fe6-44e7-9ffb-f0edacf3be81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1263.469783] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1263.470185] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1263.772050] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec55da8a-723f-4bfa-8f27-f84c5f4ba94a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.780393] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01833b7c-d756-4b2e-bc55-d5b2e51715d2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.813370] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe2ca43-0a66-446c-b733-cd74b97a0aa2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.822248] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c9cf29-0565-4b78-9ca2-4f86be30c238 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.836340] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1263.847213] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1263.867560] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1263.867938] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.630s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1266.864064] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.675892] env[62813]: WARNING oslo_vmware.rw_handles [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1274.675892] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1274.675892] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1274.675892] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1274.675892] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1274.675892] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1274.675892] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1274.675892] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1274.675892] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1274.675892] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1274.675892] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1274.675892] env[62813]: ERROR oslo_vmware.rw_handles [ 1274.676557] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/0198771c-9111-4a9f-9d7f-52ef321b0e3c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1274.678162] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 
489b821e-f7d0-446f-8197-550c808e5a99] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1274.678409] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Copying Virtual Disk [datastore2] vmware_temp/0198771c-9111-4a9f-9d7f-52ef321b0e3c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/0198771c-9111-4a9f-9d7f-52ef321b0e3c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1274.678696] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e157a82-d86e-49fe-a2ae-efd8c98f50b8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.686928] env[62813]: DEBUG oslo_vmware.api [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Waiting for the task: (returnval){ [ 1274.686928] env[62813]: value = "task-4267684" [ 1274.686928] env[62813]: _type = "Task" [ 1274.686928] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.696208] env[62813]: DEBUG oslo_vmware.api [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Task: {'id': task-4267684, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.197227] env[62813]: DEBUG oslo_vmware.exceptions [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1275.197524] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1275.198144] env[62813]: ERROR nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1275.198144] env[62813]: Faults: ['InvalidArgument'] [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Traceback (most recent call last): [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] yield resources [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] self.driver.spawn(context, instance, image_meta, [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] self._fetch_image_if_missing(context, vi) [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] image_cache(vi, tmp_image_ds_loc) [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] vm_util.copy_virtual_disk( [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] session._wait_for_task(vmdk_copy_task) [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] return self.wait_for_task(task_ref) [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] return evt.wait() [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] result = hub.switch() [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] return self.greenlet.switch() [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] self.f(*self.args, **self.kw) [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] raise exceptions.translate_fault(task_info.error) [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Faults: ['InvalidArgument'] [ 1275.198144] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] [ 1275.199448] env[62813]: INFO nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Terminating instance [ 1275.200171] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.200361] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1275.200599] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a0eedc6-d509-4dd6-98bf-5c6048458ecb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
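Annotation: the VimFaultException traceback above is the generic oslo.vmware call-and-wait pattern rather than anything unique to this instance. Nova invokes VirtualDiskManager.CopyVirtualDisk_Task, oslo_vmware.api polls the returned Task (the _poll_task frames), and a failed task is translated into an exception carrying the fault list, here ['InvalidArgument'] for fileType. The sketch below is a minimal illustration of that pattern only, under assumptions: the function name cache_disk_copy, the datastore paths and the commented-out session parameters are placeholders, not code or values taken from Nova or from this log.

# Minimal sketch (not Nova code) of the oslo.vmware call-and-wait pattern
# that produces the traceback above.
from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vmware_exc


def cache_disk_copy(session, dc_ref, src_path, dst_path):
    """Copy a VMDK and surface vCenter task faults the way the log shows.

    session is assumed to be an oslo_vmware.api.VMwareAPISession and dc_ref a
    Datacenter managed-object reference; the datastore paths are placeholders.
    """
    try:
        # CopyVirtualDisk_Task returns a Task reference; wait_for_task() polls
        # it (the _poll_task frames in the traceback) and turns a failed task
        # into an exception via oslo_vmware.exceptions.translate_fault().
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            session.vim.service_content.virtualDiskManager,
            sourceName=src_path, sourceDatacenter=dc_ref,
            destName=dst_path, destDatacenter=dc_ref)
        return session.wait_for_task(task)
    except vmware_exc.VimFaultException as e:
        # The branch taken above: "A specified parameter was not correct:
        # fileType", with e.fault_list == ['InvalidArgument'].
        print('disk copy failed, faults=%s' % e.fault_list)
        raise


# A session would be built roughly like this (placeholder host/credentials):
# session = vmware_api.VMwareAPISession(
#     'vc.example.org', 'user', 'secret',
#     api_retry_count=10, task_poll_interval=0.5)

Calling cache_disk_copy(session, dc_ref, '[datastore2] tmp/src.vmdk', '[datastore2] cache/dst.vmdk') against a vCenter that rejects the copy would raise the same exception class seen above; the subsequent claim abort and reschedule in this log are Nova's normal handling of a spawn failure.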
[ 1275.202987] env[62813]: DEBUG nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1275.203204] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1275.203950] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e755600-5dc1-4602-acae-3fa8f8379966 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.212175] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1275.212413] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-693841bc-23c8-4483-a473-05b47e577b20 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.215909] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1275.216113] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1275.216842] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7e014d5-cc4f-44f6-9662-3a8dc843342a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.222723] env[62813]: DEBUG oslo_vmware.api [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Waiting for the task: (returnval){ [ 1275.222723] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52b71b8a-346b-be76-c595-9047931f9f9e" [ 1275.222723] env[62813]: _type = "Task" [ 1275.222723] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.235143] env[62813]: DEBUG oslo_vmware.api [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52b71b8a-346b-be76-c595-9047931f9f9e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.290033] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1275.290033] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1275.290033] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Deleting the datastore file [datastore2] 489b821e-f7d0-446f-8197-550c808e5a99 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1275.290033] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2523e97e-e403-4d87-a7c3-c69a658bfee5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.296665] env[62813]: DEBUG oslo_vmware.api [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Waiting for the task: (returnval){ [ 1275.296665] env[62813]: value = "task-4267686" [ 1275.296665] env[62813]: _type = "Task" [ 1275.296665] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.306195] env[62813]: DEBUG oslo_vmware.api [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Task: {'id': task-4267686, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.737022] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1275.737022] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Creating directory with path [datastore2] vmware_temp/d0068ce5-1e00-4d12-a53b-0155cd5c4fad/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1275.737022] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1623a5a2-7518-49c9-92c4-f027a1176081 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.749064] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Created directory with path [datastore2] vmware_temp/d0068ce5-1e00-4d12-a53b-0155cd5c4fad/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1275.749064] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Fetch image to [datastore2] vmware_temp/d0068ce5-1e00-4d12-a53b-0155cd5c4fad/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1275.749064] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/d0068ce5-1e00-4d12-a53b-0155cd5c4fad/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1275.749064] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09ee614-585f-4d3b-b13a-90e5e3837435 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.758192] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c130e79e-bae0-4a66-9ecd-1aaf865f962a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.769024] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1833b6-6178-462e-a4c2-0832593cffa6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.805074] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36fa8dae-512c-4f71-9bb3-dd3080e8cf8d {{(pid=62813) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.816127] env[62813]: DEBUG oslo_vmware.api [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Task: {'id': task-4267686, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076786} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.816127] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1275.816127] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1275.816127] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1275.816127] env[62813]: INFO nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1275.817834] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9bc2c00f-cf00-4482-a328-40c2ee9115ed {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.820048] env[62813]: DEBUG nova.compute.claims [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1275.820423] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.820788] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.847314] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) 
fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1276.040446] env[62813]: DEBUG oslo_vmware.rw_handles [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d0068ce5-1e00-4d12-a53b-0155cd5c4fad/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1276.104584] env[62813]: DEBUG oslo_vmware.rw_handles [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1276.104824] env[62813]: DEBUG oslo_vmware.rw_handles [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d0068ce5-1e00-4d12-a53b-0155cd5c4fad/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1276.212217] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3747329b-e33d-462b-a615-efd0e0e6fc56 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.221898] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b86208-cca7-46ca-900f-76bce605a2aa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.251392] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75dd754-1488-4782-b9de-a59cc9e46ddf {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.259563] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62254c23-4e4e-4612-b656-2aba5bf8a9a6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.273354] env[62813]: DEBUG nova.compute.provider_tree [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.284219] env[62813]: DEBUG nova.scheduler.client.report [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 
'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1276.302124] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.481s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1276.302735] env[62813]: ERROR nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1276.302735] env[62813]: Faults: ['InvalidArgument'] [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Traceback (most recent call last): [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] self.driver.spawn(context, instance, image_meta, [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] self._fetch_image_if_missing(context, vi) [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] image_cache(vi, tmp_image_ds_loc) [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] vm_util.copy_virtual_disk( [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] session._wait_for_task(vmdk_copy_task) [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] return self.wait_for_task(task_ref) [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] return evt.wait() [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] result = hub.switch() [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] return self.greenlet.switch() [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] self.f(*self.args, **self.kw) [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] raise exceptions.translate_fault(task_info.error) [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Faults: ['InvalidArgument'] [ 1276.302735] env[62813]: ERROR nova.compute.manager [instance: 489b821e-f7d0-446f-8197-550c808e5a99] [ 1276.303852] env[62813]: DEBUG nova.compute.utils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1276.305194] env[62813]: DEBUG nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Build of instance 489b821e-f7d0-446f-8197-550c808e5a99 was re-scheduled: A specified parameter was not correct: fileType [ 1276.305194] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1276.305621] env[62813]: DEBUG nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1276.305815] env[62813]: DEBUG nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1276.305989] env[62813]: DEBUG nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1276.306175] env[62813]: DEBUG nova.network.neutron [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1276.956489] env[62813]: DEBUG nova.network.neutron [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.972789] env[62813]: INFO nova.compute.manager [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Took 0.67 seconds to deallocate network for instance. [ 1277.100516] env[62813]: INFO nova.scheduler.client.report [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Deleted allocations for instance 489b821e-f7d0-446f-8197-550c808e5a99 [ 1277.121021] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d333332d-7bb0-4c48-b0f2-ec770fcfe286 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Lock "489b821e-f7d0-446f-8197-550c808e5a99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 663.378s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.122215] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5eb55ab9-4df7-4720-bb2f-bdf0482b2815 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Lock "489b821e-f7d0-446f-8197-550c808e5a99" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 465.735s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.122426] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5eb55ab9-4df7-4720-bb2f-bdf0482b2815 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Acquiring lock "489b821e-f7d0-446f-8197-550c808e5a99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.122631] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5eb55ab9-4df7-4720-bb2f-bdf0482b2815 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Lock "489b821e-f7d0-446f-8197-550c808e5a99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.122794] env[62813]: DEBUG 
oslo_concurrency.lockutils [None req-5eb55ab9-4df7-4720-bb2f-bdf0482b2815 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Lock "489b821e-f7d0-446f-8197-550c808e5a99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.124909] env[62813]: INFO nova.compute.manager [None req-5eb55ab9-4df7-4720-bb2f-bdf0482b2815 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Terminating instance [ 1277.126872] env[62813]: DEBUG nova.compute.manager [None req-5eb55ab9-4df7-4720-bb2f-bdf0482b2815 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1277.127116] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb55ab9-4df7-4720-bb2f-bdf0482b2815 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1277.127817] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-41618fbd-eb83-4e56-8e9f-dc3fddddf470 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.139252] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb5dd5c-8725-4c65-8831-b703beef87ac {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.153710] env[62813]: DEBUG nova.compute.manager [None req-d567675f-9f2e-4f11-9227-89e31afb0468 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: a074bbe0-a497-4aab-93f2-9a9aa6140290] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1277.177119] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-5eb55ab9-4df7-4720-bb2f-bdf0482b2815 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 489b821e-f7d0-446f-8197-550c808e5a99 could not be found. [ 1277.177407] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb55ab9-4df7-4720-bb2f-bdf0482b2815 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1277.177634] env[62813]: INFO nova.compute.manager [None req-5eb55ab9-4df7-4720-bb2f-bdf0482b2815 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Took 0.05 seconds to destroy the instance on the hypervisor. 
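Annotation: the recurring 'Acquiring lock ...', 'Lock ... acquired ... :: waited' and 'Lock ... "released" ... :: held' lines in this section come from oslo.concurrency; the inner frames in lockutils.py belong to the synchronized decorator, while the 'Acquired/Releasing lock "[datastore2] devstack-image-cache_base/..."' lines come from the lock() context manager. Nova serializes work on fixed names such as "compute_resources" and on per-instance UUIDs. The following is a minimal sketch of that usage only; MiniTracker, update_usage and do_terminate are illustrative names, not Nova code.

# Minimal sketch of the oslo.concurrency locking that produces the
# "waited ... / held ..." debug lines above.
from oslo_concurrency import lockutils


class MiniTracker(object):
    @lockutils.synchronized('compute_resources')
    def update_usage(self):
        # Runs under the in-process "compute_resources" semaphore; the
        # decorator's inner() wrapper logs how long it waited and held it.
        pass

    def terminate(self, instance_uuid):
        # Per-instance serialization, matching entries such as
        # Lock "489b821e-..." acquired by "...do_terminate_instance".
        @lockutils.synchronized(instance_uuid)
        def do_terminate():
            pass
        do_terminate()


if __name__ == '__main__':
    tracker = MiniTracker()
    tracker.update_usage()
    tracker.terminate('489b821e-f7d0-446f-8197-550c808e5a99')

By default lockutils.synchronized uses an in-process semaphore (external=False), so the waited/held durations logged above are measured within a single nova-compute process.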
[ 1277.177921] env[62813]: DEBUG oslo.service.loopingcall [None req-5eb55ab9-4df7-4720-bb2f-bdf0482b2815 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1277.178220] env[62813]: DEBUG nova.compute.manager [-] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1277.178346] env[62813]: DEBUG nova.network.neutron [-] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1277.194744] env[62813]: DEBUG nova.compute.manager [None req-d567675f-9f2e-4f11-9227-89e31afb0468 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: a074bbe0-a497-4aab-93f2-9a9aa6140290] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1277.207828] env[62813]: DEBUG nova.network.neutron [-] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.220878] env[62813]: INFO nova.compute.manager [-] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] Took 0.04 seconds to deallocate network for instance. [ 1277.236457] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d567675f-9f2e-4f11-9227-89e31afb0468 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "a074bbe0-a497-4aab-93f2-9a9aa6140290" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.333s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.247415] env[62813]: DEBUG nova.compute.manager [None req-54050b8f-279d-4d1a-b80d-85a1bdc9029b tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 4e00cefc-ffe9-41e4-9520-281d937e32ce] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1277.278012] env[62813]: DEBUG nova.compute.manager [None req-54050b8f-279d-4d1a-b80d-85a1bdc9029b tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 4e00cefc-ffe9-41e4-9520-281d937e32ce] Instance disappeared before build. 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1277.303751] env[62813]: DEBUG oslo_concurrency.lockutils [None req-54050b8f-279d-4d1a-b80d-85a1bdc9029b tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "4e00cefc-ffe9-41e4-9520-281d937e32ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.307s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.319150] env[62813]: DEBUG nova.compute.manager [None req-11116210-719c-4d94-ab15-7d7ebd63737c tempest-ServerActionsTestOtherB-1764703617 tempest-ServerActionsTestOtherB-1764703617-project-member] [instance: ab1749b4-a87b-4bd2-bdad-b2db446f44d9] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1277.352418] env[62813]: DEBUG nova.compute.manager [None req-11116210-719c-4d94-ab15-7d7ebd63737c tempest-ServerActionsTestOtherB-1764703617 tempest-ServerActionsTestOtherB-1764703617-project-member] [instance: ab1749b4-a87b-4bd2-bdad-b2db446f44d9] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1277.373669] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5eb55ab9-4df7-4720-bb2f-bdf0482b2815 tempest-ServersTestJSON-54998654 tempest-ServersTestJSON-54998654-project-member] Lock "489b821e-f7d0-446f-8197-550c808e5a99" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.251s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.374901] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "489b821e-f7d0-446f-8197-550c808e5a99" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 128.678s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.375131] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 489b821e-f7d0-446f-8197-550c808e5a99] During sync_power_state the instance has a pending task (deleting). Skip. [ 1277.375311] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "489b821e-f7d0-446f-8197-550c808e5a99" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.377377] env[62813]: DEBUG oslo_concurrency.lockutils [None req-11116210-719c-4d94-ab15-7d7ebd63737c tempest-ServerActionsTestOtherB-1764703617 tempest-ServerActionsTestOtherB-1764703617-project-member] Lock "ab1749b4-a87b-4bd2-bdad-b2db446f44d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.286s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.391076] env[62813]: DEBUG nova.compute.manager [None req-d63da806-40b6-4bea-8de4-0f31634227fa tempest-FloatingIPsAssociationTestJSON-1590727866 tempest-FloatingIPsAssociationTestJSON-1590727866-project-member] [instance: 81ac869e-c8ab-4f75-bfb7-bbb3296c24dd] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1277.416047] env[62813]: DEBUG nova.compute.manager [None req-d63da806-40b6-4bea-8de4-0f31634227fa tempest-FloatingIPsAssociationTestJSON-1590727866 tempest-FloatingIPsAssociationTestJSON-1590727866-project-member] [instance: 81ac869e-c8ab-4f75-bfb7-bbb3296c24dd] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1277.446093] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d63da806-40b6-4bea-8de4-0f31634227fa tempest-FloatingIPsAssociationTestJSON-1590727866 tempest-FloatingIPsAssociationTestJSON-1590727866-project-member] Lock "81ac869e-c8ab-4f75-bfb7-bbb3296c24dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.474s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.455073] env[62813]: DEBUG nova.compute.manager [None req-192f62c9-4564-48b3-b7cf-ba1a8d9ba39b tempest-ServersTestBootFromVolume-127445550 tempest-ServersTestBootFromVolume-127445550-project-member] [instance: 6943412a-83f2-437b-80af-4a2de7ed5029] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1277.481071] env[62813]: DEBUG nova.compute.manager [None req-192f62c9-4564-48b3-b7cf-ba1a8d9ba39b tempest-ServersTestBootFromVolume-127445550 tempest-ServersTestBootFromVolume-127445550-project-member] [instance: 6943412a-83f2-437b-80af-4a2de7ed5029] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1277.517561] env[62813]: DEBUG oslo_concurrency.lockutils [None req-192f62c9-4564-48b3-b7cf-ba1a8d9ba39b tempest-ServersTestBootFromVolume-127445550 tempest-ServersTestBootFromVolume-127445550-project-member] Lock "6943412a-83f2-437b-80af-4a2de7ed5029" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.012s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.529565] env[62813]: DEBUG nova.compute.manager [None req-bc3b853c-5d2d-4a0b-88d4-62fd016a765d tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] [instance: 3a85472c-25b9-4fb7-a438-84fa699d7f0c] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1277.558469] env[62813]: DEBUG nova.compute.manager [None req-bc3b853c-5d2d-4a0b-88d4-62fd016a765d tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] [instance: 3a85472c-25b9-4fb7-a438-84fa699d7f0c] Instance disappeared before build. 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1277.588170] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bc3b853c-5d2d-4a0b-88d4-62fd016a765d tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] Lock "3a85472c-25b9-4fb7-a438-84fa699d7f0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.055s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.609628] env[62813]: DEBUG nova.compute.manager [None req-57e07797-9a6b-4bdf-923c-cd3c06b3edf6 tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] [instance: bae5a146-4946-4e03-a6f5-062e79a61def] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1277.635805] env[62813]: DEBUG nova.compute.manager [None req-57e07797-9a6b-4bdf-923c-cd3c06b3edf6 tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] [instance: bae5a146-4946-4e03-a6f5-062e79a61def] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1277.664268] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57e07797-9a6b-4bdf-923c-cd3c06b3edf6 tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Lock "bae5a146-4946-4e03-a6f5-062e79a61def" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.579s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.681673] env[62813]: DEBUG nova.compute.manager [None req-6045ed6b-6075-4ae0-a59d-19b602c80b2f tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: b322a084-d312-45b9-90d3-11c2180c71f8] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1277.708531] env[62813]: DEBUG nova.compute.manager [None req-6045ed6b-6075-4ae0-a59d-19b602c80b2f tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: b322a084-d312-45b9-90d3-11c2180c71f8] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1277.737096] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6045ed6b-6075-4ae0-a59d-19b602c80b2f tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "b322a084-d312-45b9-90d3-11c2180c71f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.778s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.748327] env[62813]: DEBUG nova.compute.manager [None req-6045ed6b-6075-4ae0-a59d-19b602c80b2f tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 1f26c898-895c-4256-a0c4-a1596279acc5] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1277.775020] env[62813]: DEBUG nova.compute.manager [None req-6045ed6b-6075-4ae0-a59d-19b602c80b2f tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] [instance: 1f26c898-895c-4256-a0c4-a1596279acc5] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1277.797014] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6045ed6b-6075-4ae0-a59d-19b602c80b2f tempest-MultipleCreateTestJSON-1757949274 tempest-MultipleCreateTestJSON-1757949274-project-member] Lock "1f26c898-895c-4256-a0c4-a1596279acc5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.812s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.806787] env[62813]: DEBUG nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1277.871325] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.871596] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.873108] env[62813]: INFO nova.compute.claims [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1278.222337] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec1e313-d285-4287-841a-e557b395421d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.232255] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59f09d9-cf68-47ec-a1e9-d4f4e3b027d8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.264089] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128b9f63-d5eb-4667-9d0c-d961353d2349 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.272347] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193e8656-c5e9-4bb0-ac41-422ed77bacac {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.286931] env[62813]: DEBUG nova.compute.provider_tree [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.295961] env[62813]: DEBUG nova.scheduler.client.report [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1278.314231] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.442s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.314231] env[62813]: DEBUG nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1278.351124] env[62813]: DEBUG nova.compute.utils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1278.354539] env[62813]: DEBUG nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1278.354539] env[62813]: DEBUG nova.network.neutron [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1278.366413] env[62813]: DEBUG nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Start building block device mappings for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1278.454456] env[62813]: DEBUG nova.policy [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c3f4a1c3e1a4c1da8a5fb37f1d47b44', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df3b950b677a484f8bbcebfa07d3f4da', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1278.456920] env[62813]: DEBUG nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1278.485135] env[62813]: DEBUG nova.virt.hardware [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1278.485777] env[62813]: DEBUG nova.virt.hardware [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1278.485777] env[62813]: DEBUG nova.virt.hardware [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1278.485777] env[62813]: DEBUG nova.virt.hardware [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1278.485914] env[62813]: DEBUG nova.virt.hardware [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1278.486028] env[62813]: DEBUG 
nova.virt.hardware [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1278.486518] env[62813]: DEBUG nova.virt.hardware [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1278.486518] env[62813]: DEBUG nova.virt.hardware [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1278.486630] env[62813]: DEBUG nova.virt.hardware [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1278.486863] env[62813]: DEBUG nova.virt.hardware [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1278.486945] env[62813]: DEBUG nova.virt.hardware [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1278.489707] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdaeb404-07e9-4393-b595-9f06aa36445c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.500680] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1f4207-11c0-48c2-9c22-7ffa209139af {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.001739] env[62813]: DEBUG nova.network.neutron [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Successfully created port: 59d79ae9-7dbb-48f4-b2dd-5b31f81397d2 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1279.697386] env[62813]: DEBUG nova.compute.manager [req-7a2a8697-9790-4be9-ae5c-56e64ddb91f0 req-8809df7c-da44-46f4-85f9-dc104d79becd service nova] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Received event network-vif-plugged-59d79ae9-7dbb-48f4-b2dd-5b31f81397d2 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1279.697665] env[62813]: DEBUG oslo_concurrency.lockutils [req-7a2a8697-9790-4be9-ae5c-56e64ddb91f0 req-8809df7c-da44-46f4-85f9-dc104d79becd 
service nova] Acquiring lock "5d0e1cb1-9f54-4a76-960c-99d0803afd2e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.698185] env[62813]: DEBUG oslo_concurrency.lockutils [req-7a2a8697-9790-4be9-ae5c-56e64ddb91f0 req-8809df7c-da44-46f4-85f9-dc104d79becd service nova] Lock "5d0e1cb1-9f54-4a76-960c-99d0803afd2e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.698299] env[62813]: DEBUG oslo_concurrency.lockutils [req-7a2a8697-9790-4be9-ae5c-56e64ddb91f0 req-8809df7c-da44-46f4-85f9-dc104d79becd service nova] Lock "5d0e1cb1-9f54-4a76-960c-99d0803afd2e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.698572] env[62813]: DEBUG nova.compute.manager [req-7a2a8697-9790-4be9-ae5c-56e64ddb91f0 req-8809df7c-da44-46f4-85f9-dc104d79becd service nova] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] No waiting events found dispatching network-vif-plugged-59d79ae9-7dbb-48f4-b2dd-5b31f81397d2 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1279.698788] env[62813]: WARNING nova.compute.manager [req-7a2a8697-9790-4be9-ae5c-56e64ddb91f0 req-8809df7c-da44-46f4-85f9-dc104d79becd service nova] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Received unexpected event network-vif-plugged-59d79ae9-7dbb-48f4-b2dd-5b31f81397d2 for instance with vm_state building and task_state spawning. 
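The req-7a2a8697/req-8809df7c entries above trace an external event delivered by Neutron: network-vif-plugged-59d79ae9-7dbb-48f4-b2dd-5b31f81397d2 arrives while instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e is still building, the manager takes the per-instance "-events" lock, finds no registered waiter to dispatch, and logs the event as unexpected. The sketch below is an assumed, minimal re-implementation of that pop-or-warn shape using plain threading primitives; it is not Nova's InstanceEvents class, only the same pattern.

```python
# Illustrative sketch only -- not Nova's code. A waiter is registered per
# (instance, event) pair; the external-event callback pops it under a lock and
# either wakes it up or logs the event as unexpected, as in the log above.

import threading
from collections import defaultdict


class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()       # stands in for the "<uuid>-events" lock
        self._waiters = defaultdict(dict)   # uuid -> {event_name: threading.Event}

    def prepare_for_event(self, uuid, event_name):
        """Register interest before an operation that expects the event."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[uuid][event_name] = waiter
        return waiter

    def pop_instance_event(self, uuid, event_name):
        """Pop and return the waiter for this event, or None if none registered."""
        with self._lock:
            return self._waiters[uuid].pop(event_name, None)


def external_instance_event(events, uuid, event_name):
    waiter = events.pop_instance_event(uuid, event_name)
    if waiter is None:
        print(f"WARNING: received unexpected event {event_name} for instance {uuid}")
    else:
        waiter.set()   # wake whoever is blocked waiting for the VIF to be plugged


if __name__ == "__main__":
    events = InstanceEvents()
    # No waiter registered yet, so this takes the "unexpected event" branch,
    # just like the WARNING logged above for the building/spawning instance.
    external_instance_event(
        events,
        "5d0e1cb1-9f54-4a76-960c-99d0803afd2e",
        "network-vif-plugged-59d79ae9-7dbb-48f4-b2dd-5b31f81397d2",
    )
```

Registering a waiter with prepare_for_event() before the callback fires would take the dispatch branch instead, which is how a spawn that chooses to wait for VIF plugging would be woken up rather than warned about.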
[ 1279.786375] env[62813]: DEBUG nova.network.neutron [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Successfully updated port: 59d79ae9-7dbb-48f4-b2dd-5b31f81397d2 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1279.800982] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Acquiring lock "refresh_cache-5d0e1cb1-9f54-4a76-960c-99d0803afd2e" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1279.801144] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Acquired lock "refresh_cache-5d0e1cb1-9f54-4a76-960c-99d0803afd2e" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.801305] env[62813]: DEBUG nova.network.neutron [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1279.900173] env[62813]: DEBUG nova.network.neutron [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1280.339973] env[62813]: DEBUG nova.network.neutron [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Updating instance_info_cache with network_info: [{"id": "59d79ae9-7dbb-48f4-b2dd-5b31f81397d2", "address": "fa:16:3e:32:0b:02", "network": {"id": "216857b8-b370-4e62-9396-30667f455c82", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-208576348-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df3b950b677a484f8bbcebfa07d3f4da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59d79ae9-7d", "ovs_interfaceid": "59d79ae9-7dbb-48f4-b2dd-5b31f81397d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.356580] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Releasing lock "refresh_cache-5d0e1cb1-9f54-4a76-960c-99d0803afd2e" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1280.356922] env[62813]: DEBUG nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Instance network_info: |[{"id": "59d79ae9-7dbb-48f4-b2dd-5b31f81397d2", "address": "fa:16:3e:32:0b:02", "network": {"id": "216857b8-b370-4e62-9396-30667f455c82", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-208576348-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df3b950b677a484f8bbcebfa07d3f4da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59d79ae9-7d", "ovs_interfaceid": "59d79ae9-7dbb-48f4-b2dd-5b31f81397d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1280.357393] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:0b:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59d79ae9-7dbb-48f4-b2dd-5b31f81397d2', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1280.365418] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Creating folder: Project (df3b950b677a484f8bbcebfa07d3f4da). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1280.366089] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81806fd5-4238-49dc-9f76-2e2f0ed7d0c2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.379631] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Created folder: Project (df3b950b677a484f8bbcebfa07d3f4da) in parent group-v840812. [ 1280.379631] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Creating folder: Instances. Parent ref: group-v840886. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1280.379851] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b3c25a5-25a1-4753-83cc-f56b715f7a38 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.391210] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Created folder: Instances in parent group-v840886. [ 1280.391468] env[62813]: DEBUG oslo.service.loopingcall [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1280.391672] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1280.391920] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45daa402-ed42-43b2-a0a0-431779276ede {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.412138] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1280.412138] env[62813]: value = "task-4267689" [ 1280.412138] env[62813]: _type = "Task" [ 1280.412138] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.420849] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267689, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.923722] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267689, 'name': CreateVM_Task, 'duration_secs': 0.37043} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.924130] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1280.924724] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.924883] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.925252] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1280.925528] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92096bf8-17b9-420d-84a1-f8e471cb49c4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.932186] env[62813]: DEBUG oslo_vmware.api [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Waiting for the task: (returnval){ [ 1280.932186] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52013e31-6b3b-a3d8-9943-63e7ff5f1d3b" [ 1280.932186] env[62813]: _type = "Task" [ 1280.932186] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.941120] env[62813]: DEBUG oslo_vmware.api [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52013e31-6b3b-a3d8-9943-63e7ff5f1d3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.443789] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.444057] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1281.444272] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.764798] env[62813]: DEBUG nova.compute.manager [req-019e2704-7695-4064-a087-7e4171e9b57a req-dd9e8ab6-b3ff-4425-b477-713b2773f775 service nova] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Received event network-changed-59d79ae9-7dbb-48f4-b2dd-5b31f81397d2 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1281.764798] env[62813]: DEBUG nova.compute.manager [req-019e2704-7695-4064-a087-7e4171e9b57a req-dd9e8ab6-b3ff-4425-b477-713b2773f775 service nova] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Refreshing instance network info cache due to event network-changed-59d79ae9-7dbb-48f4-b2dd-5b31f81397d2. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1281.764798] env[62813]: DEBUG oslo_concurrency.lockutils [req-019e2704-7695-4064-a087-7e4171e9b57a req-dd9e8ab6-b3ff-4425-b477-713b2773f775 service nova] Acquiring lock "refresh_cache-5d0e1cb1-9f54-4a76-960c-99d0803afd2e" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.764798] env[62813]: DEBUG oslo_concurrency.lockutils [req-019e2704-7695-4064-a087-7e4171e9b57a req-dd9e8ab6-b3ff-4425-b477-713b2773f775 service nova] Acquired lock "refresh_cache-5d0e1cb1-9f54-4a76-960c-99d0803afd2e" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.764798] env[62813]: DEBUG nova.network.neutron [req-019e2704-7695-4064-a087-7e4171e9b57a req-dd9e8ab6-b3ff-4425-b477-713b2773f775 service nova] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Refreshing network info cache for port 59d79ae9-7dbb-48f4-b2dd-5b31f81397d2 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1282.156859] env[62813]: DEBUG nova.network.neutron [req-019e2704-7695-4064-a087-7e4171e9b57a req-dd9e8ab6-b3ff-4425-b477-713b2773f775 service nova] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Updated VIF entry in instance network info cache for port 59d79ae9-7dbb-48f4-b2dd-5b31f81397d2. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1282.157843] env[62813]: DEBUG nova.network.neutron [req-019e2704-7695-4064-a087-7e4171e9b57a req-dd9e8ab6-b3ff-4425-b477-713b2773f775 service nova] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Updating instance_info_cache with network_info: [{"id": "59d79ae9-7dbb-48f4-b2dd-5b31f81397d2", "address": "fa:16:3e:32:0b:02", "network": {"id": "216857b8-b370-4e62-9396-30667f455c82", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-208576348-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df3b950b677a484f8bbcebfa07d3f4da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59d79ae9-7d", "ovs_interfaceid": "59d79ae9-7dbb-48f4-b2dd-5b31f81397d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.174262] env[62813]: DEBUG oslo_concurrency.lockutils [req-019e2704-7695-4064-a087-7e4171e9b57a req-dd9e8ab6-b3ff-4425-b477-713b2773f775 service nova] Releasing lock "refresh_cache-5d0e1cb1-9f54-4a76-960c-99d0803afd2e" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1287.048287] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 
tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Acquiring lock "c9402929-e845-416b-91e5-39d08ab90a2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.048594] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Lock "c9402929-e845-416b-91e5-39d08ab90a2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.520911] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d2afbddd-88fa-429b-b83b-41a6b35ea078 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Acquiring lock "5d0e1cb1-9f54-4a76-960c-99d0803afd2e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.166374] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1315.166683] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1319.164674] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1319.164674] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1321.159919] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1322.164721] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1322.165119] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1322.165119] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1322.188508] env[62813]: DEBUG nova.compute.manager [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1322.188662] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1322.188794] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1322.188925] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1322.189115] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1322.189293] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1322.189426] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1322.189550] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1322.189670] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1322.189789] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1322.189934] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1323.163632] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.163995] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.164470] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.692964] env[62813]: WARNING oslo_vmware.rw_handles [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1324.692964] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1324.692964] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1324.692964] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1324.692964] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1324.692964] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1324.692964] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1324.692964] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1324.692964] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1324.692964] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1324.692964] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1324.692964] env[62813]: ERROR oslo_vmware.rw_handles [ 1324.693475] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/d0068ce5-1e00-4d12-a53b-0155cd5c4fad/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1324.695264] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1324.695581] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 
tempest-ServersAdminTestJSON-1450973175-project-member] Copying Virtual Disk [datastore2] vmware_temp/d0068ce5-1e00-4d12-a53b-0155cd5c4fad/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/d0068ce5-1e00-4d12-a53b-0155cd5c4fad/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1324.695928] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6144458-c283-433a-b228-8589930f84fd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.706402] env[62813]: DEBUG oslo_vmware.api [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Waiting for the task: (returnval){ [ 1324.706402] env[62813]: value = "task-4267690" [ 1324.706402] env[62813]: _type = "Task" [ 1324.706402] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.715485] env[62813]: DEBUG oslo_vmware.api [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Task: {'id': task-4267690, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.164625] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1325.179066] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.179066] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.179066] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.179066] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1325.179962] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6837bab-0522-4d14-8ab8-0aba44d8abed {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.189692] env[62813]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f837ae43-41a5-43dc-a578-c249920b5a13 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.205036] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a55f261-4a83-43bc-b9f8-a9a9d3d34421 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.217655] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71eeb73-e5e3-40e9-ae19-de96f9c62948 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.220563] env[62813]: DEBUG oslo_vmware.exceptions [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1325.221120] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1325.221670] env[62813]: ERROR nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1325.221670] env[62813]: Faults: ['InvalidArgument'] [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Traceback (most recent call last): [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] yield resources [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] self.driver.spawn(context, instance, image_meta, [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] self._fetch_image_if_missing(context, vi) [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] image_cache(vi, tmp_image_ds_loc) [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] vm_util.copy_virtual_disk( [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] session._wait_for_task(vmdk_copy_task) [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] return self.wait_for_task(task_ref) [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] return evt.wait() [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] result = hub.switch() [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] return self.greenlet.switch() [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] self.f(*self.args, **self.kw) [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] raise exceptions.translate_fault(task_info.error) [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Faults: ['InvalidArgument'] [ 1325.221670] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] [ 1325.222504] env[62813]: INFO nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: 
b946bdda-a8a4-4a82-b2f7-99637fcae21c] Terminating instance [ 1325.224648] env[62813]: DEBUG nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1325.224870] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1325.225489] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.225779] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1325.226722] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3bdaa33-9ce9-482c-be22-a72ddac1c2f3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.254265] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80f78d0a-b2fa-43bf-88ee-a1be893d1a1b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.256850] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180782MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1325.257026] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.257211] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.263538] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1325.263856] env[62813]: 
DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0cb6d4f-ca03-4ad8-8c98-17d5bc561312 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.266779] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1325.267012] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1325.268209] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2127297-8fd6-4a1a-9dcc-dae12029d4bd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.273340] env[62813]: DEBUG oslo_vmware.api [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Waiting for the task: (returnval){ [ 1325.273340] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52477b77-810d-0494-9313-9c6fa1019de5" [ 1325.273340] env[62813]: _type = "Task" [ 1325.273340] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.281491] env[62813]: DEBUG oslo_vmware.api [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52477b77-810d-0494-9313-9c6fa1019de5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.337249] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1325.337501] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1325.337723] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Deleting the datastore file [datastore2] b946bdda-a8a4-4a82-b2f7-99637fcae21c {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1325.337994] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d96208d4-11de-4a95-a7ec-ec307a7d2416 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.344061] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b946bdda-a8a4-4a82-b2f7-99637fcae21c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.344220] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.344356] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9b7bace-d76a-4dd8-8283-b56fd86a77a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.344481] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.344606] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.344727] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance eec98a4d-34f4-4313-8f9c-2fe9f483959c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.345328] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d79298e6-bb55-4ba6-9a68-e460c8953c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.345328] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e6442505-b5d0-4736-a24a-41fccda6da6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.345328] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5655255a-1d03-4854-b8ad-d77643f9b9c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.345328] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1325.347943] env[62813]: DEBUG oslo_vmware.api [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Waiting for the task: (returnval){ [ 1325.347943] env[62813]: value = "task-4267692" [ 1325.347943] env[62813]: _type = "Task" [ 1325.347943] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.357862] env[62813]: DEBUG oslo_vmware.api [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Task: {'id': task-4267692, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.361678] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1325.373572] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d40089e3-67b3-452e-a0d1-18d5def1ff34 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1325.385974] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance abc5e07e-8408-4938-9831-42d828ef877d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1325.398958] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 244be995-46ef-43fe-bec7-bdf9da081985 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1325.410831] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 125e3e78-bb24-47c5-9096-00c6667c925a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1325.422457] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 9fdaddae-c9a4-4867-9f80-91e70efd2b51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1325.433889] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1325.445240] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b06482cf-8823-41af-a940-2dfb5d72e70e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1325.456052] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 8b7930d8-8117-4b3d-8218-39bff602a4b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1325.466926] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 467d9133-7fe6-44e7-9ffb-f0edacf3be81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1325.477198] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9402929-e845-416b-91e5-39d08ab90a2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1325.477453] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1325.477602] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1325.733669] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2195505-052c-4cbb-b617-b872b17ef233 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.742409] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53292af-00df-4a7b-b20b-9d32050566f8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.778331] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97396d34-3219-4347-ac19-84ddcbef38e0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.788768] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8299fbe7-2dc8-48ad-b635-b9eb5538af38 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.792571] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 
tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1325.792812] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Creating directory with path [datastore2] vmware_temp/52b7d858-bd11-4902-94a1-0ef6634ce37d/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1325.793344] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1098312-e597-4fd5-957a-4ede0e9978e5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.803622] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1325.805683] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Created directory with path [datastore2] vmware_temp/52b7d858-bd11-4902-94a1-0ef6634ce37d/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1325.805877] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Fetch image to [datastore2] vmware_temp/52b7d858-bd11-4902-94a1-0ef6634ce37d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1325.806064] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/52b7d858-bd11-4902-94a1-0ef6634ce37d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1325.806977] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b026d6-f87a-4da0-b056-92a1aed8296f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.814199] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f4b73d-d4be-4e5d-9def-352d266adcff {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.818797] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1325.827707] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cb4e55-804d-46b2-ba6a-775427280ac7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.862628] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1325.862839] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.606s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.866566] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc93001c-dc58-4db0-9510-a8ee985da04b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.878349] env[62813]: DEBUG oslo_vmware.api [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Task: {'id': task-4267692, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079332} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.878568] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-89f60f54-2576-4e69-b180-5afd1b459b9e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.880474] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1325.880672] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1325.880852] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1325.881041] env[62813]: INFO nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Took 0.66 seconds to destroy the instance on the hypervisor. 
[ 1325.883416] env[62813]: DEBUG nova.compute.claims [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1325.883610] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.883842] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.907459] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1325.972851] env[62813]: DEBUG oslo_vmware.rw_handles [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52b7d858-bd11-4902-94a1-0ef6634ce37d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1326.029901] env[62813]: DEBUG oslo_vmware.rw_handles [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1326.030118] env[62813]: DEBUG oslo_vmware.rw_handles [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52b7d858-bd11-4902-94a1-0ef6634ce37d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1326.243438] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702b5cc3-289f-471b-923c-955fc253959a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.251432] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ccd4838-829f-4c48-b33a-a95a2d1e5edf {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.280974] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3cf76c-e22e-43d8-b555-d83121e87e78 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.289785] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5467f896-e96b-49cc-8433-b6f211838342 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.303574] env[62813]: DEBUG nova.compute.provider_tree [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1326.312561] env[62813]: DEBUG nova.scheduler.client.report [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1326.326919] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.443s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.327481] env[62813]: ERROR nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1326.327481] env[62813]: Faults: ['InvalidArgument'] [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Traceback (most recent call last): [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1326.327481] env[62813]: ERROR 
nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] self.driver.spawn(context, instance, image_meta, [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] self._fetch_image_if_missing(context, vi) [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] image_cache(vi, tmp_image_ds_loc) [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] vm_util.copy_virtual_disk( [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] session._wait_for_task(vmdk_copy_task) [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] return self.wait_for_task(task_ref) [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] return evt.wait() [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] result = hub.switch() [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] return self.greenlet.switch() [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] self.f(*self.args, **self.kw) [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] raise exceptions.translate_fault(task_info.error) [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Faults: ['InvalidArgument'] [ 1326.327481] env[62813]: ERROR nova.compute.manager [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] [ 1326.328360] env[62813]: DEBUG nova.compute.utils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1326.329712] env[62813]: DEBUG nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Build of instance b946bdda-a8a4-4a82-b2f7-99637fcae21c was re-scheduled: A specified parameter was not correct: fileType [ 1326.329712] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1326.330110] env[62813]: DEBUG nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1326.330284] env[62813]: DEBUG nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1326.330457] env[62813]: DEBUG nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1326.330623] env[62813]: DEBUG nova.network.neutron [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1326.666350] env[62813]: DEBUG nova.network.neutron [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.678111] env[62813]: INFO nova.compute.manager [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Took 0.35 seconds to deallocate network for instance. [ 1326.779598] env[62813]: INFO nova.scheduler.client.report [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Deleted allocations for instance b946bdda-a8a4-4a82-b2f7-99637fcae21c [ 1326.806158] env[62813]: DEBUG oslo_concurrency.lockutils [None req-19cdae3d-f9e4-4cad-beb6-38b1d307f498 tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 689.615s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.806973] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33e5c19a-70b2-46f3-b709-28a7ca4079ee tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 486.664s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.807219] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33e5c19a-70b2-46f3-b709-28a7ca4079ee tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Acquiring lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.807427] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33e5c19a-70b2-46f3-b709-28a7ca4079ee tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.807596] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33e5c19a-70b2-46f3-b709-28a7ca4079ee tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.810159] env[62813]: INFO nova.compute.manager [None req-33e5c19a-70b2-46f3-b709-28a7ca4079ee tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Terminating instance [ 1326.812460] env[62813]: DEBUG nova.compute.manager [None req-33e5c19a-70b2-46f3-b709-28a7ca4079ee tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1326.812676] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33e5c19a-70b2-46f3-b709-28a7ca4079ee tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1326.813258] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04a8db43-0d18-46e0-bccc-bc09debba529 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.823027] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b9daa9-4937-4db2-93ce-73e7a33b10db {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.833999] env[62813]: DEBUG nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1326.856215] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-33e5c19a-70b2-46f3-b709-28a7ca4079ee tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b946bdda-a8a4-4a82-b2f7-99637fcae21c could not be found. [ 1326.856478] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33e5c19a-70b2-46f3-b709-28a7ca4079ee tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1326.856676] env[62813]: INFO nova.compute.manager [None req-33e5c19a-70b2-46f3-b709-28a7ca4079ee tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1326.856930] env[62813]: DEBUG oslo.service.loopingcall [None req-33e5c19a-70b2-46f3-b709-28a7ca4079ee tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1326.857185] env[62813]: DEBUG nova.compute.manager [-] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1326.857285] env[62813]: DEBUG nova.network.neutron [-] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1326.882319] env[62813]: DEBUG nova.network.neutron [-] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.886394] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.886646] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.888188] env[62813]: INFO nova.compute.claims [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1326.891941] env[62813]: INFO nova.compute.manager [-] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] Took 0.03 seconds to deallocate network for instance. [ 1326.990735] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33e5c19a-70b2-46f3-b709-28a7ca4079ee tempest-ServersAdminTestJSON-1450973175 tempest-ServersAdminTestJSON-1450973175-project-member] Lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.184s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.991953] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 178.295s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.992157] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b946bdda-a8a4-4a82-b2f7-99637fcae21c] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1326.992362] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "b946bdda-a8a4-4a82-b2f7-99637fcae21c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.210496] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268009d2-2a0c-4cb1-8895-6f0f26be1255 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.218256] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d5f85a-b1cd-4d3c-8367-bc080858b0d9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.250316] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339ccee6-734a-4a42-8d6d-6989c4f8cb8d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.258994] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1938517a-c6ad-4a5d-b8f1-64607404008b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.273479] env[62813]: DEBUG nova.compute.provider_tree [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.283432] env[62813]: DEBUG nova.scheduler.client.report [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1327.300116] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.413s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.300625] env[62813]: DEBUG nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Start building networks asynchronously for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1327.333637] env[62813]: DEBUG nova.compute.utils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1327.335246] env[62813]: DEBUG nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1327.335418] env[62813]: DEBUG nova.network.neutron [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1327.344661] env[62813]: DEBUG nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1327.400119] env[62813]: DEBUG nova.policy [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2d35c8be82e4fc9869db5e897120b41', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f31911e62f9b45ae85874fccc7a916f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1327.412492] env[62813]: DEBUG nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1327.438947] env[62813]: DEBUG nova.virt.hardware [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1327.439268] env[62813]: DEBUG nova.virt.hardware [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1327.439430] env[62813]: DEBUG nova.virt.hardware [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1327.439612] env[62813]: DEBUG nova.virt.hardware [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1327.439762] env[62813]: DEBUG nova.virt.hardware [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1327.439914] env[62813]: DEBUG nova.virt.hardware [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1327.440179] env[62813]: DEBUG nova.virt.hardware [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1327.440351] env[62813]: DEBUG nova.virt.hardware [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1327.440520] env[62813]: DEBUG 
nova.virt.hardware [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1327.440686] env[62813]: DEBUG nova.virt.hardware [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1327.440864] env[62813]: DEBUG nova.virt.hardware [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1327.441759] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46da3e45-c0e0-4671-a83b-45f64b0afd06 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.450337] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0ccc83-cdd0-43e9-b878-f6e5d5509e79 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.959567] env[62813]: DEBUG nova.network.neutron [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Successfully created port: 67006eba-6f10-4034-8e43-11d7146685fb {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1328.703728] env[62813]: DEBUG nova.compute.manager [req-5343f2e2-9119-4d2a-bb96-210e0955dadb req-8da06ff2-503a-461a-9739-ff5ff3babf4a service nova] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Received event network-vif-plugged-67006eba-6f10-4034-8e43-11d7146685fb {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1328.703991] env[62813]: DEBUG oslo_concurrency.lockutils [req-5343f2e2-9119-4d2a-bb96-210e0955dadb req-8da06ff2-503a-461a-9739-ff5ff3babf4a service nova] Acquiring lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.704211] env[62813]: DEBUG oslo_concurrency.lockutils [req-5343f2e2-9119-4d2a-bb96-210e0955dadb req-8da06ff2-503a-461a-9739-ff5ff3babf4a service nova] Lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.704383] env[62813]: DEBUG oslo_concurrency.lockutils [req-5343f2e2-9119-4d2a-bb96-210e0955dadb req-8da06ff2-503a-461a-9739-ff5ff3babf4a service nova] Lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.704553] env[62813]: DEBUG 
nova.compute.manager [req-5343f2e2-9119-4d2a-bb96-210e0955dadb req-8da06ff2-503a-461a-9739-ff5ff3babf4a service nova] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] No waiting events found dispatching network-vif-plugged-67006eba-6f10-4034-8e43-11d7146685fb {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1328.704715] env[62813]: WARNING nova.compute.manager [req-5343f2e2-9119-4d2a-bb96-210e0955dadb req-8da06ff2-503a-461a-9739-ff5ff3babf4a service nova] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Received unexpected event network-vif-plugged-67006eba-6f10-4034-8e43-11d7146685fb for instance with vm_state building and task_state spawning. [ 1328.729428] env[62813]: DEBUG nova.network.neutron [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Successfully updated port: 67006eba-6f10-4034-8e43-11d7146685fb {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1328.744322] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "refresh_cache-b50a8094-fc39-420f-a1d0-a29b5ee29df2" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.744469] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquired lock "refresh_cache-b50a8094-fc39-420f-a1d0-a29b5ee29df2" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.744617] env[62813]: DEBUG nova.network.neutron [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1328.789642] env[62813]: DEBUG nova.network.neutron [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1328.988451] env[62813]: DEBUG nova.network.neutron [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Updating instance_info_cache with network_info: [{"id": "67006eba-6f10-4034-8e43-11d7146685fb", "address": "fa:16:3e:62:5a:d8", "network": {"id": "2cd9a110-b173-4c4d-a5a9-9372ad45917a", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454094257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f31911e62f9b45ae85874fccc7a916f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67006eba-6f", "ovs_interfaceid": "67006eba-6f10-4034-8e43-11d7146685fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.003350] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Releasing lock "refresh_cache-b50a8094-fc39-420f-a1d0-a29b5ee29df2" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.003660] env[62813]: DEBUG nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Instance network_info: |[{"id": "67006eba-6f10-4034-8e43-11d7146685fb", "address": "fa:16:3e:62:5a:d8", "network": {"id": "2cd9a110-b173-4c4d-a5a9-9372ad45917a", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454094257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f31911e62f9b45ae85874fccc7a916f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67006eba-6f", "ovs_interfaceid": "67006eba-6f10-4034-8e43-11d7146685fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1329.004159] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:5a:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67006eba-6f10-4034-8e43-11d7146685fb', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1329.011702] env[62813]: DEBUG oslo.service.loopingcall [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1329.015151] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1329.015151] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0973c7ce-6c78-445a-80ba-c69279be9a5a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.037177] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1329.037177] env[62813]: value = "task-4267693" [ 1329.037177] env[62813]: _type = "Task" [ 1329.037177] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.046712] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267693, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.548561] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267693, 'name': CreateVM_Task, 'duration_secs': 0.339431} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.548561] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1329.549306] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.549475] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.549814] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1329.550086] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d8548a4-8dd7-474d-a8c1-d31830ec0eb8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.555548] env[62813]: DEBUG oslo_vmware.api [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Waiting for the task: (returnval){ [ 1329.555548] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52ca282a-5f01-bd93-d23d-74207b16b018" [ 1329.555548] env[62813]: _type = "Task" [ 1329.555548] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.564567] env[62813]: DEBUG oslo_vmware.api [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52ca282a-5f01-bd93-d23d-74207b16b018, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.070484] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.071509] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1330.071781] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.736841] env[62813]: DEBUG nova.compute.manager [req-aec1d10d-0f86-46a5-ae99-f28b432aa1f9 req-1628e487-03ca-499a-bb06-5b1c2de50547 service nova] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Received event network-changed-67006eba-6f10-4034-8e43-11d7146685fb {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1330.737127] env[62813]: DEBUG nova.compute.manager [req-aec1d10d-0f86-46a5-ae99-f28b432aa1f9 req-1628e487-03ca-499a-bb06-5b1c2de50547 service nova] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Refreshing instance network info cache due to event network-changed-67006eba-6f10-4034-8e43-11d7146685fb. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1330.737471] env[62813]: DEBUG oslo_concurrency.lockutils [req-aec1d10d-0f86-46a5-ae99-f28b432aa1f9 req-1628e487-03ca-499a-bb06-5b1c2de50547 service nova] Acquiring lock "refresh_cache-b50a8094-fc39-420f-a1d0-a29b5ee29df2" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.737632] env[62813]: DEBUG oslo_concurrency.lockutils [req-aec1d10d-0f86-46a5-ae99-f28b432aa1f9 req-1628e487-03ca-499a-bb06-5b1c2de50547 service nova] Acquired lock "refresh_cache-b50a8094-fc39-420f-a1d0-a29b5ee29df2" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.737811] env[62813]: DEBUG nova.network.neutron [req-aec1d10d-0f86-46a5-ae99-f28b432aa1f9 req-1628e487-03ca-499a-bb06-5b1c2de50547 service nova] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Refreshing network info cache for port 67006eba-6f10-4034-8e43-11d7146685fb {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1330.998786] env[62813]: DEBUG nova.network.neutron [req-aec1d10d-0f86-46a5-ae99-f28b432aa1f9 req-1628e487-03ca-499a-bb06-5b1c2de50547 service nova] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Updated VIF entry in instance network info cache for port 67006eba-6f10-4034-8e43-11d7146685fb. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1330.999431] env[62813]: DEBUG nova.network.neutron [req-aec1d10d-0f86-46a5-ae99-f28b432aa1f9 req-1628e487-03ca-499a-bb06-5b1c2de50547 service nova] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Updating instance_info_cache with network_info: [{"id": "67006eba-6f10-4034-8e43-11d7146685fb", "address": "fa:16:3e:62:5a:d8", "network": {"id": "2cd9a110-b173-4c4d-a5a9-9372ad45917a", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1454094257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f31911e62f9b45ae85874fccc7a916f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67006eba-6f", "ovs_interfaceid": "67006eba-6f10-4034-8e43-11d7146685fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.012426] env[62813]: DEBUG oslo_concurrency.lockutils [req-aec1d10d-0f86-46a5-ae99-f28b432aa1f9 req-1628e487-03ca-499a-bb06-5b1c2de50547 service nova] Releasing lock "refresh_cache-b50a8094-fc39-420f-a1d0-a29b5ee29df2" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.501582] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f1ee70db-e49b-407c-9343-5878d4217fba tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.807869] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquiring lock "d5f63ddc-e786-471d-a871-2ef878bd2455" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.808734] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "d5f63ddc-e786-471d-a871-2ef878bd2455" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.847290] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 
tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquiring lock "a296754a-5842-4ab5-9dd9-ccda09caa7d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.847661] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "a296754a-5842-4ab5-9dd9-ccda09caa7d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.883105] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquiring lock "5a2ecdf4-b577-41a5-bb04-a0e4efac48f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.883423] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "5a2ecdf4-b577-41a5-bb04-a0e4efac48f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.712157] env[62813]: WARNING oslo_vmware.rw_handles [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1375.712157] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1375.712157] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1375.712157] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1375.712157] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1375.712157] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1375.712157] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1375.712157] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1375.712157] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1375.712157] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1375.712157] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1375.712157] env[62813]: ERROR oslo_vmware.rw_handles [ 1375.712769] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] 
Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/52b7d858-bd11-4902-94a1-0ef6634ce37d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1375.714715] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1375.714982] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Copying Virtual Disk [datastore2] vmware_temp/52b7d858-bd11-4902-94a1-0ef6634ce37d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/52b7d858-bd11-4902-94a1-0ef6634ce37d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1375.715291] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7203714-c7f8-448e-9acd-dbbd594133c6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.724040] env[62813]: DEBUG oslo_vmware.api [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Waiting for the task: (returnval){ [ 1375.724040] env[62813]: value = "task-4267694" [ 1375.724040] env[62813]: _type = "Task" [ 1375.724040] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.732192] env[62813]: DEBUG oslo_vmware.api [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Task: {'id': task-4267694, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.871102] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1375.871314] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1376.233458] env[62813]: DEBUG oslo_vmware.exceptions [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1376.233774] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.234374] env[62813]: ERROR nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1376.234374] env[62813]: Faults: ['InvalidArgument'] [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Traceback (most recent call last): [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] yield resources [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] self.driver.spawn(context, instance, image_meta, [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] self._fetch_image_if_missing(context, vi) [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] image_cache(vi, tmp_image_ds_loc) [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] vm_util.copy_virtual_disk( [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] session._wait_for_task(vmdk_copy_task) [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] return self.wait_for_task(task_ref) [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] return evt.wait() [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] result = hub.switch() [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] return self.greenlet.switch() [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] self.f(*self.args, **self.kw) [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] raise exceptions.translate_fault(task_info.error) [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Faults: ['InvalidArgument'] [ 1376.234374] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] [ 1376.235438] env[62813]: INFO nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Terminating instance [ 1376.236491] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.236734] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1376.236982] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-acf8c51f-d300-4a0c-aad6-8f00dc169530 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.239533] env[62813]: DEBUG nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1376.239752] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1376.240489] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd834000-cc6f-4608-b461-a2742afdf850 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.248178] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1376.248426] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89114b77-a2b8-4094-a86b-e7df9194347d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.250979] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1376.251177] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1376.252257] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d421c5f-8ffe-4b86-8360-5f689cd9e614 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.257865] env[62813]: DEBUG oslo_vmware.api [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Waiting for the task: (returnval){ [ 1376.257865] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]520554b9-4eee-5f10-d0f6-c85dac1cd074" [ 1376.257865] env[62813]: _type = "Task" [ 1376.257865] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.271510] env[62813]: DEBUG oslo_vmware.api [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]520554b9-4eee-5f10-d0f6-c85dac1cd074, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.323076] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1376.323282] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1376.323474] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Deleting the datastore file [datastore2] 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1376.323759] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9cd861c0-c32f-475e-8e23-a68a38400ca5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.330600] env[62813]: DEBUG oslo_vmware.api [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Waiting for the task: (returnval){ [ 1376.330600] env[62813]: value = "task-4267696" [ 1376.330600] env[62813]: _type = "Task" [ 1376.330600] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.339155] env[62813]: DEBUG oslo_vmware.api [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Task: {'id': task-4267696, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.768800] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1376.769161] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Creating directory with path [datastore2] vmware_temp/9153b87f-09b3-47ca-b550-d152e199423a/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1376.769334] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0d31da1-be7a-4bd6-b500-52198cf99475 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.783484] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Created directory with path [datastore2] vmware_temp/9153b87f-09b3-47ca-b550-d152e199423a/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1376.783746] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Fetch image to [datastore2] vmware_temp/9153b87f-09b3-47ca-b550-d152e199423a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1376.783966] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/9153b87f-09b3-47ca-b550-d152e199423a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1376.784870] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a484342-26c4-44a9-b72d-0089d4e84613 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.793172] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667d4101-f364-4189-91fe-9490839490f8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.803668] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474917ca-b6f2-4aa3-b06a-761c546ef95a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.842510] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-07de57a9-9500-4dbe-a0c1-ef1eae63996f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.851260] env[62813]: DEBUG oslo_vmware.api [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Task: {'id': task-4267696, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092719} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.852928] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1376.853181] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1376.853405] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1376.853627] env[62813]: INFO nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Took 0.61 seconds to destroy the instance on the hypervisor. 
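The repeated "Waiting for the task ... to complete", "progress is 0%." and "completed successfully ... duration_secs" entries throughout this log (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) all come from the same client-side pattern: submit a vCenter task, then poll its status on a fixed interval until it reports success or raises a fault. The sketch below only mirrors that behaviour as it appears in the log; it is not the oslo.vmware implementation, and TaskInfo, make_fake_poller and wait_for_task are hypothetical stand-ins introduced here for illustration.

# Illustrative sketch only: reproduces the poll-until-done pattern visible in
# the log. It does not call the real oslo.vmware API; the task source is faked.
import itertools
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    task_id: str
    state: str          # "running", "success" or "error"
    progress: int       # percent complete
    error: str | None = None


def make_fake_poller(task_id: str, steps: int = 3):
    """Return a callable that pretends to query vCenter for task status."""
    step = max(1, 100 // steps)
    progress_values = itertools.chain(range(0, 100, step), itertools.repeat(100))

    def get_task_info() -> TaskInfo:
        progress = next(progress_values)
        state = "success" if progress >= 100 else "running"
        return TaskInfo(task_id=task_id, state=state, progress=progress)

    return get_task_info


def wait_for_task(get_task_info, poll_interval: float = 0.5) -> float:
    """Poll a task until it finishes; return its duration in seconds."""
    started = time.monotonic()
    while True:
        info = get_task_info()
        if info.state == "success":
            duration = time.monotonic() - started
            print(f"Task {info.task_id} completed successfully, "
                  f"duration_secs={duration:.6f}")
            return duration
        if info.state == "error":
            raise RuntimeError(f"Task {info.task_id} failed: {info.error}")
        print(f"Task {info.task_id} progress is {info.progress}%.")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Example run: prints a few "progress is N%." lines, then the completion
    # line, much like the task entries for task-4267696 above.
    wait_for_task(make_fake_poller("task-4267696"), poll_interval=0.1)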
[ 1376.855660] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-024abc5e-1b36-4545-a0c7-c39438be4bfa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.857941] env[62813]: DEBUG nova.compute.claims [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1376.858174] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.858426] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.884372] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1376.946386] env[62813]: DEBUG oslo_vmware.rw_handles [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9153b87f-09b3-47ca-b550-d152e199423a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1377.006145] env[62813]: DEBUG oslo_vmware.rw_handles [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1377.006371] env[62813]: DEBUG oslo_vmware.rw_handles [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9153b87f-09b3-47ca-b550-d152e199423a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1377.229532] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd17e9dd-8556-4433-95ab-a8e107d38e1f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.238370] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8eeeec-8e12-4422-8c7c-29557041d510 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.269453] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db45220-6a72-486f-ba41-e6fc7e094eda {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.276810] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc76a6d9-1e3f-4ca2-8085-7ed6c51e9c04 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.290780] env[62813]: DEBUG nova.compute.provider_tree [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1377.300585] env[62813]: DEBUG nova.scheduler.client.report [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1377.317744] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.459s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.318996] env[62813]: ERROR nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1377.318996] env[62813]: Faults: ['InvalidArgument'] [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Traceback (most recent call last): [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1377.318996] env[62813]: 
ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] self.driver.spawn(context, instance, image_meta, [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] self._fetch_image_if_missing(context, vi) [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] image_cache(vi, tmp_image_ds_loc) [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] vm_util.copy_virtual_disk( [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] session._wait_for_task(vmdk_copy_task) [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] return self.wait_for_task(task_ref) [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] return evt.wait() [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] result = hub.switch() [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] return self.greenlet.switch() [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] self.f(*self.args, **self.kw) [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] raise exceptions.translate_fault(task_info.error) [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Faults: ['InvalidArgument'] [ 1377.318996] env[62813]: ERROR nova.compute.manager [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] [ 1377.319734] env[62813]: DEBUG nova.compute.utils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1377.320606] env[62813]: DEBUG nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Build of instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 was re-scheduled: A specified parameter was not correct: fileType [ 1377.320606] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1377.321009] env[62813]: DEBUG nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1377.321205] env[62813]: DEBUG nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1377.321388] env[62813]: DEBUG nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1377.321562] env[62813]: DEBUG nova.network.neutron [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1377.668100] env[62813]: DEBUG nova.network.neutron [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.685939] env[62813]: INFO nova.compute.manager [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Took 0.36 seconds to deallocate network for instance. [ 1377.791433] env[62813]: INFO nova.scheduler.client.report [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Deleted allocations for instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 [ 1377.815053] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57fb3865-4a9f-4f53-9824-61079c4af1c1 tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 634.130s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.816289] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b695ba0a-48af-4fb8-af68-cd1f5f71398d tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 437.535s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.816517] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b695ba0a-48af-4fb8-af68-cd1f5f71398d tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.816730] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b695ba0a-48af-4fb8-af68-cd1f5f71398d tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.816898] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b695ba0a-48af-4fb8-af68-cd1f5f71398d tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.818853] env[62813]: INFO nova.compute.manager [None req-b695ba0a-48af-4fb8-af68-cd1f5f71398d tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Terminating instance [ 1377.820627] env[62813]: DEBUG nova.compute.manager [None req-b695ba0a-48af-4fb8-af68-cd1f5f71398d tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1377.820852] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-b695ba0a-48af-4fb8-af68-cd1f5f71398d tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1377.821326] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b143122-cac4-4fd5-bd2d-048933136467 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.831181] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33186b0b-31e9-4c0d-907a-e318831c549b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.842231] env[62813]: DEBUG nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1377.864331] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-b695ba0a-48af-4fb8-af68-cd1f5f71398d tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4 could not be found. [ 1377.864617] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-b695ba0a-48af-4fb8-af68-cd1f5f71398d tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1377.864778] env[62813]: INFO nova.compute.manager [None req-b695ba0a-48af-4fb8-af68-cd1f5f71398d tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Took 0.04 seconds to destroy the instance on the hypervisor. 
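The traceback earlier in this section ends with oslo_vmware's _poll_task translating the vCenter task error into a VimFaultException that carries Faults: ['InvalidArgument'] and the message "A specified parameter was not correct: fileType", raised out of wait_for_task during _cache_sparse_image. The snippet below is a minimal, hedged sketch of how a caller could surface that fault information when waiting on such a disk-copy task; it assumes an already-authenticated oslo_vmware VMwareAPISession is available as `session`, and the helper name is illustrative, not the Nova driver's actual code.

    from oslo_vmware import exceptions as vexc

    def wait_for_copy(session, copy_task):
        """Wait for a vCenter task and report VIM faults (illustrative only)."""
        try:
            # Same call path as the traceback above: oslo_vmware api.py wait_for_task()
            return session.wait_for_task(copy_task)
        except vexc.VimFaultException as exc:
            # exc.fault_list holds fault names such as 'InvalidArgument';
            # str(exc) carries the human-readable text from task_info.error.
            print("task failed with faults %s: %s" % (exc.fault_list, exc))
            raise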
[ 1377.865057] env[62813]: DEBUG oslo.service.loopingcall [None req-b695ba0a-48af-4fb8-af68-cd1f5f71398d tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1377.865287] env[62813]: DEBUG nova.compute.manager [-] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1377.865400] env[62813]: DEBUG nova.network.neutron [-] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1377.890039] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.890292] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.891806] env[62813]: INFO nova.compute.claims [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1377.895031] env[62813]: DEBUG nova.network.neutron [-] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.903211] env[62813]: INFO nova.compute.manager [-] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] Took 0.04 seconds to deallocate network for instance. 
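The recurring "Acquiring lock ... / Lock ... acquired ... waited ... / Lock ... 'released' ... held ..." triplets in this log, such as the "compute_resources" lock taken by ResourceTracker.instance_claim just above, are emitted by oslo.concurrency's lockutils when debug logging is enabled: it times how long each caller waited for and then held a named lock. A small sketch of that pattern, assuming oslo.concurrency is installed; the function body is a stand-in, not the resource tracker's real claim logic.

    import time
    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def instance_claim_stub(instance_uuid):
        # Everything inside runs with the named in-process lock held; with
        # debug logging configured, lockutils logs the acquired/waited/held
        # lines seen above around this call.
        time.sleep(0.01)
        return {"resources": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}}

    if __name__ == "__main__":
        print(instance_claim_stub("d40089e3-67b3-452e-a0d1-18d5def1ff34"))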
[ 1378.003860] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b695ba0a-48af-4fb8-af68-cd1f5f71398d tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.188s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.004980] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 229.308s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.005228] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 99a760f5-19cd-4f1f-86d7-ebb09bb54ef4] During sync_power_state the instance has a pending task (deleting). Skip. [ 1378.005464] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "99a760f5-19cd-4f1f-86d7-ebb09bb54ef4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.224608] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0fdfa1a-d7ae-4442-b9a2-f06fe42ab622 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.233802] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd42ab02-45bf-4380-802d-762845cf56a5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.266837] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c5588d-3f4f-42a3-bee4-4afda6dc01ca {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.274879] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba940b5-3da8-4a67-986f-d7d8bfea60ce {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.289016] env[62813]: DEBUG nova.compute.provider_tree [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1378.317198] env[62813]: DEBUG nova.scheduler.client.report [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 
'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1378.331136] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.441s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.331692] env[62813]: DEBUG nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1378.367405] env[62813]: DEBUG nova.compute.utils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1378.368636] env[62813]: DEBUG nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1378.368808] env[62813]: DEBUG nova.network.neutron [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1378.380849] env[62813]: DEBUG nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1378.433792] env[62813]: DEBUG nova.policy [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8cbb2acb6d24968a0eb6fa38a4f6edb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '396b02b3f3d14523af2d4247490aa63f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1378.451399] env[62813]: DEBUG nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1378.477853] env[62813]: DEBUG nova.virt.hardware [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1378.478132] env[62813]: DEBUG nova.virt.hardware [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1378.478316] env[62813]: DEBUG nova.virt.hardware [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1378.478518] env[62813]: DEBUG nova.virt.hardware [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1378.478679] env[62813]: DEBUG nova.virt.hardware [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1378.478833] env[62813]: DEBUG nova.virt.hardware [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1378.480132] env[62813]: DEBUG nova.virt.hardware [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1378.480132] env[62813]: DEBUG nova.virt.hardware [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
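The hardware records around this point walk the CPU-topology selection for the m1.nano flavor: with no flavor or image limits or preferences (0:0:0), the limits default to 65536 sockets/cores/threads, and for a single vCPU only the 1:1:1 factorization exists, which is why the records that follow report exactly one possible topology and choose VirtCPUTopology(cores=1,sockets=1,threads=1). The essential step is enumerating factorizations of the vCPU count under those caps; a self-contained illustration (not the exact nova.virt.hardware implementation) is:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) tuples whose product equals vcpus."""
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield (s, c, t)

    # For the 1-vCPU m1.nano flavor in this log, only (1, 1, 1) qualifies.
    print(list(possible_topologies(1)))        # -> [(1, 1, 1)]
    print(len(list(possible_topologies(4))))   # 4 vCPUs admit several splits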
[ 1378.480132] env[62813]: DEBUG nova.virt.hardware [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1378.480132] env[62813]: DEBUG nova.virt.hardware [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1378.480132] env[62813]: DEBUG nova.virt.hardware [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1378.480617] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f553b1-1457-4647-981a-20fa543b21dd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.489286] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599c144b-9554-4fef-8652-1957856a4857 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.039017] env[62813]: DEBUG nova.network.neutron [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Successfully created port: ce05737a-3443-4d6a-a8fa-9eebdb67765d {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1379.164326] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1379.722251] env[62813]: DEBUG nova.compute.manager [req-cb79b2e3-49f4-40fc-9e82-ec079c05f02a req-c12ea114-581b-4e1d-aa93-a3b9774029f3 service nova] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Received event network-vif-plugged-ce05737a-3443-4d6a-a8fa-9eebdb67765d {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1379.722674] env[62813]: DEBUG oslo_concurrency.lockutils [req-cb79b2e3-49f4-40fc-9e82-ec079c05f02a req-c12ea114-581b-4e1d-aa93-a3b9774029f3 service nova] Acquiring lock "d40089e3-67b3-452e-a0d1-18d5def1ff34-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.723246] env[62813]: DEBUG oslo_concurrency.lockutils [req-cb79b2e3-49f4-40fc-9e82-ec079c05f02a req-c12ea114-581b-4e1d-aa93-a3b9774029f3 service nova] Lock "d40089e3-67b3-452e-a0d1-18d5def1ff34-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.723536] env[62813]: DEBUG oslo_concurrency.lockutils [req-cb79b2e3-49f4-40fc-9e82-ec079c05f02a 
req-c12ea114-581b-4e1d-aa93-a3b9774029f3 service nova] Lock "d40089e3-67b3-452e-a0d1-18d5def1ff34-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.727492] env[62813]: DEBUG nova.compute.manager [req-cb79b2e3-49f4-40fc-9e82-ec079c05f02a req-c12ea114-581b-4e1d-aa93-a3b9774029f3 service nova] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] No waiting events found dispatching network-vif-plugged-ce05737a-3443-4d6a-a8fa-9eebdb67765d {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1379.727492] env[62813]: WARNING nova.compute.manager [req-cb79b2e3-49f4-40fc-9e82-ec079c05f02a req-c12ea114-581b-4e1d-aa93-a3b9774029f3 service nova] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Received unexpected event network-vif-plugged-ce05737a-3443-4d6a-a8fa-9eebdb67765d for instance with vm_state building and task_state spawning. [ 1379.777148] env[62813]: DEBUG nova.network.neutron [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Successfully updated port: ce05737a-3443-4d6a-a8fa-9eebdb67765d {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1379.792528] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquiring lock "refresh_cache-d40089e3-67b3-452e-a0d1-18d5def1ff34" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.792731] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquired lock "refresh_cache-d40089e3-67b3-452e-a0d1-18d5def1ff34" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.792893] env[62813]: DEBUG nova.network.neutron [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1379.860192] env[62813]: DEBUG nova.network.neutron [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1380.088355] env[62813]: DEBUG nova.network.neutron [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Updating instance_info_cache with network_info: [{"id": "ce05737a-3443-4d6a-a8fa-9eebdb67765d", "address": "fa:16:3e:e6:3a:dd", "network": {"id": "b76d339e-83de-4d13-bb08-1c32f8ab0ced", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1157331176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "396b02b3f3d14523af2d4247490aa63f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce05737a-34", "ovs_interfaceid": "ce05737a-3443-4d6a-a8fa-9eebdb67765d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.099857] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Releasing lock "refresh_cache-d40089e3-67b3-452e-a0d1-18d5def1ff34" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.100184] env[62813]: DEBUG nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Instance network_info: |[{"id": "ce05737a-3443-4d6a-a8fa-9eebdb67765d", "address": "fa:16:3e:e6:3a:dd", "network": {"id": "b76d339e-83de-4d13-bb08-1c32f8ab0ced", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1157331176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "396b02b3f3d14523af2d4247490aa63f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce05737a-34", "ovs_interfaceid": "ce05737a-3443-4d6a-a8fa-9eebdb67765d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1380.100611] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:3a:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce05737a-3443-4d6a-a8fa-9eebdb67765d', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1380.108101] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Creating folder: Project (396b02b3f3d14523af2d4247490aa63f). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1380.108682] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75dc7427-1c05-4c77-aa22-53e87abab500 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.120387] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Created folder: Project (396b02b3f3d14523af2d4247490aa63f) in parent group-v840812. [ 1380.120584] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Creating folder: Instances. Parent ref: group-v840890. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1380.120871] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-29160b8a-1e2d-4c9a-a53e-c9dc64bba03b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.131145] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Created folder: Instances in parent group-v840890. [ 1380.131405] env[62813]: DEBUG oslo.service.loopingcall [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1380.131599] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1380.131811] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f5797bd-2d04-40bc-a26b-148ac6d2cd65 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.155874] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1380.155874] env[62813]: value = "task-4267699" [ 1380.155874] env[62813]: _type = "Task" [ 1380.155874] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.166859] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1380.167124] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267699, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.665634] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267699, 'name': CreateVM_Task, 'duration_secs': 0.322316} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.665812] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1380.666497] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1380.666661] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.666981] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1380.667254] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6e1fee2-fdc7-4487-a218-711821a98f43 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.671915] env[62813]: DEBUG oslo_vmware.api [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 
tempest-ListServerFiltersTestJSON-1439601628-project-member] Waiting for the task: (returnval){ [ 1380.671915] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5270aee3-cf7d-38b8-093b-1979c27475e0" [ 1380.671915] env[62813]: _type = "Task" [ 1380.671915] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.680118] env[62813]: DEBUG oslo_vmware.api [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5270aee3-cf7d-38b8-093b-1979c27475e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.183481] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.183832] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1381.184028] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.746098] env[62813]: DEBUG nova.compute.manager [req-cae804eb-5122-477a-828e-ff6721bc2f66 req-218bf947-08ed-46a4-a3b8-511b14f84a83 service nova] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Received event network-changed-ce05737a-3443-4d6a-a8fa-9eebdb67765d {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1381.746325] env[62813]: DEBUG nova.compute.manager [req-cae804eb-5122-477a-828e-ff6721bc2f66 req-218bf947-08ed-46a4-a3b8-511b14f84a83 service nova] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Refreshing instance network info cache due to event network-changed-ce05737a-3443-4d6a-a8fa-9eebdb67765d. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1381.746541] env[62813]: DEBUG oslo_concurrency.lockutils [req-cae804eb-5122-477a-828e-ff6721bc2f66 req-218bf947-08ed-46a4-a3b8-511b14f84a83 service nova] Acquiring lock "refresh_cache-d40089e3-67b3-452e-a0d1-18d5def1ff34" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.746696] env[62813]: DEBUG oslo_concurrency.lockutils [req-cae804eb-5122-477a-828e-ff6721bc2f66 req-218bf947-08ed-46a4-a3b8-511b14f84a83 service nova] Acquired lock "refresh_cache-d40089e3-67b3-452e-a0d1-18d5def1ff34" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.746878] env[62813]: DEBUG nova.network.neutron [req-cae804eb-5122-477a-828e-ff6721bc2f66 req-218bf947-08ed-46a4-a3b8-511b14f84a83 service nova] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Refreshing network info cache for port ce05737a-3443-4d6a-a8fa-9eebdb67765d {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1382.027433] env[62813]: DEBUG nova.network.neutron [req-cae804eb-5122-477a-828e-ff6721bc2f66 req-218bf947-08ed-46a4-a3b8-511b14f84a83 service nova] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Updated VIF entry in instance network info cache for port ce05737a-3443-4d6a-a8fa-9eebdb67765d. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1382.027892] env[62813]: DEBUG nova.network.neutron [req-cae804eb-5122-477a-828e-ff6721bc2f66 req-218bf947-08ed-46a4-a3b8-511b14f84a83 service nova] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Updating instance_info_cache with network_info: [{"id": "ce05737a-3443-4d6a-a8fa-9eebdb67765d", "address": "fa:16:3e:e6:3a:dd", "network": {"id": "b76d339e-83de-4d13-bb08-1c32f8ab0ced", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1157331176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "396b02b3f3d14523af2d4247490aa63f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce05737a-34", "ovs_interfaceid": "ce05737a-3443-4d6a-a8fa-9eebdb67765d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.039314] env[62813]: DEBUG oslo_concurrency.lockutils [req-cae804eb-5122-477a-828e-ff6721bc2f66 req-218bf947-08ed-46a4-a3b8-511b14f84a83 service nova] Releasing lock "refresh_cache-d40089e3-67b3-452e-a0d1-18d5def1ff34" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.160661] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.163245] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.163403] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1383.163527] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1383.192737] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1383.193044] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1383.193277] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1383.193506] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1383.193688] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1383.193957] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1383.194609] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1383.194609] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1383.194609] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1383.194760] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1383.194880] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1383.195622] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1385.164604] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1386.159614] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1386.185340] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1387.163648] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1387.175220] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.175467] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.175672] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.175832] env[62813]: DEBUG 
nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1387.177368] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380df4a8-23de-4173-a39a-b317ce07afb2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.186810] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176681e1-3ef6-4704-a19a-c376c8a4fa97 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.202418] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6bf7be-0e5f-4584-a786-278a9242249e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.210337] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075f84de-9dca-4f97-9f2d-0312afa53663 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.242313] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180778MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1387.242313] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.242606] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.322045] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9b7bace-d76a-4dd8-8283-b56fd86a77a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1387.322248] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1387.322362] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1387.322516] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance eec98a4d-34f4-4313-8f9c-2fe9f483959c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1387.322638] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d79298e6-bb55-4ba6-9a68-e460c8953c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1387.322759] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e6442505-b5d0-4736-a24a-41fccda6da6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1387.322876] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5655255a-1d03-4854-b8ad-d77643f9b9c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1387.323017] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1387.323138] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1387.323255] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d40089e3-67b3-452e-a0d1-18d5def1ff34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1387.335644] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance abc5e07e-8408-4938-9831-42d828ef877d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1387.348627] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 125e3e78-bb24-47c5-9096-00c6667c925a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1387.359511] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 9fdaddae-c9a4-4867-9f80-91e70efd2b51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1387.371726] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1387.382445] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b06482cf-8823-41af-a940-2dfb5d72e70e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1387.393678] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 8b7930d8-8117-4b3d-8218-39bff602a4b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1387.406143] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 467d9133-7fe6-44e7-9ffb-f0edacf3be81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1387.417438] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9402929-e845-416b-91e5-39d08ab90a2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1387.429044] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d5f63ddc-e786-471d-a871-2ef878bd2455 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1387.439380] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a296754a-5842-4ab5-9dd9-ccda09caa7d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1387.449789] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5a2ecdf4-b577-41a5-bb04-a0e4efac48f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1387.449996] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1387.450844] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1387.564428] env[62813]: DEBUG oslo_concurrency.lockutils [None req-12b75fa2-c40a-4d84-9d3a-b0cd8eff949c tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquiring lock "d40089e3-67b3-452e-a0d1-18d5def1ff34" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.715508] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545a07d4-80e0-4613-9470-02b437c4bc18 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.723914] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41ce984-8375-496c-aae6-c8a37f8ab219 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.754136] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b42aac9-5b66-4bd0-a79b-c8ca3786c67a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.763168] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ad4ea2-90ad-4560-a8c4-9a926b429990 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.779375] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1387.789026] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1387.802935] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1387.803310] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.561s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.022633] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "176d5151-358a-4b90-9aff-064aa9648618" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.022942] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "176d5151-358a-4b90-9aff-064aa9648618" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.311863] env[62813]: WARNING oslo_vmware.rw_handles [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1424.311863] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1424.311863] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1424.311863] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1424.311863] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1424.311863] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1424.311863] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1424.311863] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1424.311863] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1424.311863] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1424.311863] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1424.311863] env[62813]: ERROR oslo_vmware.rw_handles [ 1424.312531] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/9153b87f-09b3-47ca-b550-d152e199423a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1424.314551] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 
tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1424.314855] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Copying Virtual Disk [datastore2] vmware_temp/9153b87f-09b3-47ca-b550-d152e199423a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/9153b87f-09b3-47ca-b550-d152e199423a/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1424.315229] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-411c8f79-de11-45f8-9872-5f183f8353e2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.324123] env[62813]: DEBUG oslo_vmware.api [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Waiting for the task: (returnval){ [ 1424.324123] env[62813]: value = "task-4267700" [ 1424.324123] env[62813]: _type = "Task" [ 1424.324123] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.333615] env[62813]: DEBUG oslo_vmware.api [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Task: {'id': task-4267700, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.834612] env[62813]: DEBUG oslo_vmware.exceptions [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1424.834922] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.835517] env[62813]: ERROR nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1424.835517] env[62813]: Faults: ['InvalidArgument'] [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Traceback (most recent call last): [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] yield resources [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] self.driver.spawn(context, instance, image_meta, [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] self._fetch_image_if_missing(context, vi) [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] image_cache(vi, tmp_image_ds_loc) [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] vm_util.copy_virtual_disk( [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] session._wait_for_task(vmdk_copy_task) [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] return self.wait_for_task(task_ref) [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] return evt.wait() [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] result = hub.switch() [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] return self.greenlet.switch() [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] self.f(*self.args, **self.kw) [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] raise exceptions.translate_fault(task_info.error) [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Faults: ['InvalidArgument'] [ 1424.835517] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] [ 1424.836425] env[62813]: INFO nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Terminating instance [ 1424.837605] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.837828] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1424.838093] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11e6aed2-83c5-405d-9986-d0f8834ecdd4 
{{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.840682] env[62813]: DEBUG nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1424.840859] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1424.841655] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e837f958-efad-4428-89f3-6e9514eb86db {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.848701] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1424.848928] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92bc9f41-fdce-47c4-9ec2-7cbe5aa8dfff {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.851362] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1424.851540] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1424.852536] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14e2d854-5021-4a7f-8c3e-05d81891cc0d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.857741] env[62813]: DEBUG oslo_vmware.api [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Waiting for the task: (returnval){ [ 1424.857741] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52da546f-1231-5ae6-2962-c5bae3cb7d81" [ 1424.857741] env[62813]: _type = "Task" [ 1424.857741] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.866729] env[62813]: DEBUG oslo_vmware.api [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52da546f-1231-5ae6-2962-c5bae3cb7d81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.923396] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1424.923651] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1424.923788] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Deleting the datastore file [datastore2] c9b7bace-d76a-4dd8-8283-b56fd86a77a4 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1424.924068] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa59140c-2ffd-4e9f-a14d-41f3a2963c56 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.930574] env[62813]: DEBUG oslo_vmware.api [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Waiting for the task: (returnval){ [ 1424.930574] env[62813]: value = "task-4267702" [ 1424.930574] env[62813]: _type = "Task" [ 1424.930574] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.938957] env[62813]: DEBUG oslo_vmware.api [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Task: {'id': task-4267702, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.368277] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1425.368600] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Creating directory with path [datastore2] vmware_temp/8e3d0cb9-24cb-4949-b903-55044b3a3387/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1425.368787] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d286904d-6cf4-44a8-94d4-68da59c19499 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.380347] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Created directory with path [datastore2] vmware_temp/8e3d0cb9-24cb-4949-b903-55044b3a3387/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1425.380538] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Fetch image to [datastore2] vmware_temp/8e3d0cb9-24cb-4949-b903-55044b3a3387/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1425.380713] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/8e3d0cb9-24cb-4949-b903-55044b3a3387/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1425.381480] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5956874b-3a7e-44fb-8dc3-7f58dc8b26fb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.390758] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2d9e07-bc76-446a-9bce-07dd4ebb475a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.400798] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b4ebc5-d133-4368-9957-beb15cfce18c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.431250] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3b431c-9c9e-4d39-869d-354b61c4a18e {{(pid=62813) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.443580] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-506b3109-d30b-46af-ac88-68249696e741 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.445623] env[62813]: DEBUG oslo_vmware.api [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Task: {'id': task-4267702, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080667} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.445889] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1425.446090] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1425.446269] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1425.446452] env[62813]: INFO nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Took 0.61 seconds to destroy the instance on the hypervisor. 
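Editor's note: the CopyVirtualDisk_Task, SearchDatastore_Task and DeleteDatastoreFile_Task entries above all follow the same pattern that the oslo.vmware entries describe: a *_Task method is invoked, then wait_for_task/_poll_task blocks until the task reaches a terminal state, logging "progress is 0%" while it runs and, on failure, running the fault through exceptions.translate_fault (the "Fault InvalidArgument not matched" line shows no more specific fault class applied). The sketch below is a minimal, purely illustrative version of that polling loop; get_task_info and TaskFault are hypothetical stand-ins, not the oslo.vmware API itself.

    # Illustrative only: a generic poll loop in the spirit of the _poll_task /
    # wait_for_task entries in this log. get_task_info() is a hypothetical
    # callable returning an object with .state ('queued' | 'running' |
    # 'success' | 'error'), .progress and .error.
    import time

    class TaskFault(Exception):
        """Hypothetical stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(get_task_info, poll_interval=0.5):
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info          # completed tasks log e.g. 'duration_secs'
            if info.state == 'error':
                # The real code translates the fault before raising; an
                # unmatched fault surfaces as a generic VimFaultException.
                raise TaskFault(info.error)
            # 'queued' / 'running': report progress and retry, as the
            # "progress is 0%" lines above show.
            time.sleep(poll_interval)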
[ 1425.448767] env[62813]: DEBUG nova.compute.claims [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1425.448967] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.449213] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.469704] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1425.673812] env[62813]: DEBUG oslo_vmware.rw_handles [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8e3d0cb9-24cb-4949-b903-55044b3a3387/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1425.732157] env[62813]: DEBUG oslo_vmware.rw_handles [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1425.732362] env[62813]: DEBUG oslo_vmware.rw_handles [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8e3d0cb9-24cb-4949-b903-55044b3a3387/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1425.823809] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f26b5f-e1ad-4fa4-9e61-fa7ac8a9dd14 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.831961] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afacaac3-b916-4702-9ade-18cc1b008ee5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.862634] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0311e28f-66b0-424b-a6c4-499c7a68f09c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.870301] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282a2260-ece0-44ce-bb0e-1fbe9e2b4a7a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.883980] env[62813]: DEBUG nova.compute.provider_tree [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1425.893226] env[62813]: DEBUG nova.scheduler.client.report [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1425.909125] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.460s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.909702] env[62813]: ERROR nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1425.909702] env[62813]: Faults: ['InvalidArgument'] [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Traceback (most recent call last): [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1425.909702] 
env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] self.driver.spawn(context, instance, image_meta, [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] self._fetch_image_if_missing(context, vi) [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] image_cache(vi, tmp_image_ds_loc) [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] vm_util.copy_virtual_disk( [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] session._wait_for_task(vmdk_copy_task) [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] return self.wait_for_task(task_ref) [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] return evt.wait() [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] result = hub.switch() [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] return self.greenlet.switch() [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] self.f(*self.args, **self.kw) [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] raise exceptions.translate_fault(task_info.error) [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Faults: ['InvalidArgument'] [ 1425.909702] env[62813]: ERROR nova.compute.manager [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] [ 1425.910615] env[62813]: DEBUG nova.compute.utils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1425.911838] env[62813]: DEBUG nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Build of instance c9b7bace-d76a-4dd8-8283-b56fd86a77a4 was re-scheduled: A specified parameter was not correct: fileType [ 1425.911838] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1425.912289] env[62813]: DEBUG nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1425.912497] env[62813]: DEBUG nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1425.912690] env[62813]: DEBUG nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1425.912897] env[62813]: DEBUG nova.network.neutron [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1426.403279] env[62813]: DEBUG nova.network.neutron [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.416035] env[62813]: INFO nova.compute.manager [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Took 0.50 seconds to deallocate network for instance. [ 1426.512980] env[62813]: INFO nova.scheduler.client.report [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Deleted allocations for instance c9b7bace-d76a-4dd8-8283-b56fd86a77a4 [ 1426.539962] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c488a3db-3f01-4c03-9d25-d675de5060a3 tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.691s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.539962] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7571f0f2-80e9-4db8-8194-b08538f5af7a tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 435.985s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.539962] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7571f0f2-80e9-4db8-8194-b08538f5af7a tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Acquiring lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.540342] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7571f0f2-80e9-4db8-8194-b08538f5af7a tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.540458] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7571f0f2-80e9-4db8-8194-b08538f5af7a tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.542537] env[62813]: INFO nova.compute.manager [None req-7571f0f2-80e9-4db8-8194-b08538f5af7a tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Terminating instance [ 1426.547235] env[62813]: DEBUG nova.compute.manager [None req-7571f0f2-80e9-4db8-8194-b08538f5af7a tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1426.547554] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7571f0f2-80e9-4db8-8194-b08538f5af7a tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1426.548202] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3503e5e7-2e16-4f58-9a8b-1459cb29e71e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.558983] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e83be1-9ac4-407d-8623-1ba1dfc47309 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.569626] env[62813]: DEBUG nova.compute.manager [None req-69dbdc74-8958-4c76-b8c0-e9c1cf7a655a tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: abc5e07e-8408-4938-9831-42d828ef877d] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1426.591011] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-7571f0f2-80e9-4db8-8194-b08538f5af7a tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c9b7bace-d76a-4dd8-8283-b56fd86a77a4 could not be found. 
[ 1426.591367] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-7571f0f2-80e9-4db8-8194-b08538f5af7a tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1426.591699] env[62813]: INFO nova.compute.manager [None req-7571f0f2-80e9-4db8-8194-b08538f5af7a tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1426.591840] env[62813]: DEBUG oslo.service.loopingcall [None req-7571f0f2-80e9-4db8-8194-b08538f5af7a tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1426.593691] env[62813]: DEBUG nova.compute.manager [-] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1426.593691] env[62813]: DEBUG nova.network.neutron [-] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1426.604348] env[62813]: DEBUG nova.compute.manager [None req-69dbdc74-8958-4c76-b8c0-e9c1cf7a655a tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: abc5e07e-8408-4938-9831-42d828ef877d] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1426.618859] env[62813]: DEBUG nova.network.neutron [-] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.630932] env[62813]: INFO nova.compute.manager [-] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] Took 0.04 seconds to deallocate network for instance. [ 1426.641581] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69dbdc74-8958-4c76-b8c0-e9c1cf7a655a tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "abc5e07e-8408-4938-9831-42d828ef877d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.986s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.654379] env[62813]: DEBUG nova.compute.manager [None req-271da832-8aa3-4fe8-94b7-80647ca10451 tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: 244be995-46ef-43fe-bec7-bdf9da081985] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1426.692091] env[62813]: DEBUG nova.compute.manager [None req-271da832-8aa3-4fe8-94b7-80647ca10451 tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: 244be995-46ef-43fe-bec7-bdf9da081985] Instance disappeared before build. 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1426.724241] env[62813]: DEBUG oslo_concurrency.lockutils [None req-271da832-8aa3-4fe8-94b7-80647ca10451 tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "244be995-46ef-43fe-bec7-bdf9da081985" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.670s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.735704] env[62813]: DEBUG nova.compute.manager [None req-c224f39d-c12d-41f3-9bfc-665fa481cad1 tempest-ServerRescueTestJSON-1480939942 tempest-ServerRescueTestJSON-1480939942-project-member] [instance: 125e3e78-bb24-47c5-9096-00c6667c925a] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1426.740969] env[62813]: DEBUG oslo_concurrency.lockutils [None req-7571f0f2-80e9-4db8-8194-b08538f5af7a tempest-AttachInterfacesV270Test-1041329423 tempest-AttachInterfacesV270Test-1041329423-project-member] Lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.201s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.741831] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 278.045s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.742089] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9b7bace-d76a-4dd8-8283-b56fd86a77a4] During sync_power_state the instance has a pending task (deleting). Skip. [ 1426.742298] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "c9b7bace-d76a-4dd8-8283-b56fd86a77a4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.761360] env[62813]: DEBUG nova.compute.manager [None req-c224f39d-c12d-41f3-9bfc-665fa481cad1 tempest-ServerRescueTestJSON-1480939942 tempest-ServerRescueTestJSON-1480939942-project-member] [instance: 125e3e78-bb24-47c5-9096-00c6667c925a] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1426.781792] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c224f39d-c12d-41f3-9bfc-665fa481cad1 tempest-ServerRescueTestJSON-1480939942 tempest-ServerRescueTestJSON-1480939942-project-member] Lock "125e3e78-bb24-47c5-9096-00c6667c925a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.400s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.792316] env[62813]: DEBUG nova.compute.manager [None req-ac62f249-18c2-4e7c-8415-3bd58f925002 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: 9fdaddae-c9a4-4867-9f80-91e70efd2b51] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1426.817476] env[62813]: DEBUG nova.compute.manager [None req-ac62f249-18c2-4e7c-8415-3bd58f925002 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: 9fdaddae-c9a4-4867-9f80-91e70efd2b51] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1426.843716] env[62813]: DEBUG oslo_concurrency.lockutils [None req-ac62f249-18c2-4e7c-8415-3bd58f925002 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "9fdaddae-c9a4-4867-9f80-91e70efd2b51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 196.074s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.854058] env[62813]: DEBUG nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1426.905163] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.905410] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.907082] env[62813]: INFO nova.compute.claims [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1427.213390] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ab5108-f536-4e6c-94ff-00664a1c8c26 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.221712] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fe3edf-bb15-46ea-8cb2-e4f826324bf5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.254332] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85792a7b-0383-4a75-a611-673a2452819b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.263696] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4919d9c-d612-43e9-8e7e-9db52f9d3ae0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1427.279664] env[62813]: DEBUG nova.compute.provider_tree [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1427.291489] env[62813]: DEBUG nova.scheduler.client.report [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1427.312680] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.407s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.313259] env[62813]: DEBUG nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1427.352948] env[62813]: DEBUG nova.compute.utils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1427.355087] env[62813]: DEBUG nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1427.355331] env[62813]: DEBUG nova.network.neutron [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1427.368209] env[62813]: DEBUG nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Start building block device mappings for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1427.442987] env[62813]: DEBUG nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1427.471430] env[62813]: DEBUG nova.virt.hardware [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1427.471700] env[62813]: DEBUG nova.virt.hardware [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1427.471881] env[62813]: DEBUG nova.virt.hardware [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1427.472116] env[62813]: DEBUG nova.virt.hardware [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1427.472276] env[62813]: DEBUG nova.virt.hardware [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1427.472445] env[62813]: DEBUG nova.virt.hardware [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1427.472687] env[62813]: DEBUG nova.virt.hardware [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1427.472876] env[62813]: DEBUG nova.virt.hardware [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1427.473632] env[62813]: DEBUG nova.virt.hardware [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1427.473632] env[62813]: DEBUG nova.virt.hardware [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1427.473632] env[62813]: DEBUG nova.virt.hardware [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1427.474316] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ace0ae-2a66-4d1d-a100-00dfeb1516a3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.478274] env[62813]: DEBUG nova.policy [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e577489b4e784e5abaa6a755ab08a2c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d2731f99cdc4553bd301f33c4df1517', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1427.485831] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c98c352-b46c-4d65-94d1-ced1cf9e724e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.158678] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "2c94570a-7bb0-4719-9982-0e7710470db1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.158901] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "2c94570a-7bb0-4719-9982-0e7710470db1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1428.365065] env[62813]: DEBUG nova.network.neutron [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Successfully created port: fb30160e-ee19-459c-bf4a-efa6ee135043 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1429.257549] env[62813]: DEBUG nova.compute.manager [req-4b89d654-5637-43ad-817c-17eee9bb99fb req-d942f12a-7c3c-49be-abae-e8fe750affce service nova] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Received event network-vif-plugged-fb30160e-ee19-459c-bf4a-efa6ee135043 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1429.257809] env[62813]: DEBUG oslo_concurrency.lockutils [req-4b89d654-5637-43ad-817c-17eee9bb99fb req-d942f12a-7c3c-49be-abae-e8fe750affce service nova] Acquiring lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.257970] env[62813]: DEBUG oslo_concurrency.lockutils [req-4b89d654-5637-43ad-817c-17eee9bb99fb req-d942f12a-7c3c-49be-abae-e8fe750affce service nova] Lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.258160] env[62813]: DEBUG oslo_concurrency.lockutils [req-4b89d654-5637-43ad-817c-17eee9bb99fb req-d942f12a-7c3c-49be-abae-e8fe750affce service nova] Lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.258326] env[62813]: DEBUG nova.compute.manager [req-4b89d654-5637-43ad-817c-17eee9bb99fb req-d942f12a-7c3c-49be-abae-e8fe750affce service nova] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] No waiting events found dispatching network-vif-plugged-fb30160e-ee19-459c-bf4a-efa6ee135043 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1429.258490] env[62813]: WARNING nova.compute.manager [req-4b89d654-5637-43ad-817c-17eee9bb99fb req-d942f12a-7c3c-49be-abae-e8fe750affce service nova] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Received unexpected event network-vif-plugged-fb30160e-ee19-459c-bf4a-efa6ee135043 for instance with vm_state building and task_state spawning. 
[ 1429.360046] env[62813]: DEBUG nova.network.neutron [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Successfully updated port: fb30160e-ee19-459c-bf4a-efa6ee135043 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1429.379264] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "refresh_cache-07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.379264] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquired lock "refresh_cache-07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.379264] env[62813]: DEBUG nova.network.neutron [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1429.431056] env[62813]: DEBUG nova.network.neutron [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1429.898903] env[62813]: DEBUG nova.network.neutron [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Updating instance_info_cache with network_info: [{"id": "fb30160e-ee19-459c-bf4a-efa6ee135043", "address": "fa:16:3e:f9:ef:c1", "network": {"id": "0f261672-1246-4a37-8b67-a9d24d314571", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-158054522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d2731f99cdc4553bd301f33c4df1517", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb30160e-ee", "ovs_interfaceid": "fb30160e-ee19-459c-bf4a-efa6ee135043", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.921927] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Releasing lock "refresh_cache-07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.922265] env[62813]: DEBUG nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Instance network_info: |[{"id": "fb30160e-ee19-459c-bf4a-efa6ee135043", "address": "fa:16:3e:f9:ef:c1", "network": {"id": "0f261672-1246-4a37-8b67-a9d24d314571", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-158054522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d2731f99cdc4553bd301f33c4df1517", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb30160e-ee", "ovs_interfaceid": "fb30160e-ee19-459c-bf4a-efa6ee135043", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1429.922699] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:ef:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '489b2441-7132-4942-8b61-49cf0ad4400e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb30160e-ee19-459c-bf4a-efa6ee135043', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1429.931558] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Creating folder: Project (9d2731f99cdc4553bd301f33c4df1517). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1429.931558] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2747c3a3-34d2-472f-89ed-865d7a76014c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.943837] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Created folder: Project (9d2731f99cdc4553bd301f33c4df1517) in parent group-v840812. [ 1429.944109] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Creating folder: Instances. Parent ref: group-v840893. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1429.944420] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-655405fb-ca92-42d0-87d2-bad61c1c4a05 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.954166] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Created folder: Instances in parent group-v840893. [ 1429.954434] env[62813]: DEBUG oslo.service.loopingcall [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1429.954631] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1429.954840] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f4a05d8-ced0-4995-82b2-7a10505559e3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.975452] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1429.975452] env[62813]: value = "task-4267705" [ 1429.975452] env[62813]: _type = "Task" [ 1429.975452] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.983444] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267705, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.484828] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267705, 'name': CreateVM_Task, 'duration_secs': 0.356781} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.485237] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1430.485701] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.485868] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.486226] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1430.486481] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e608eede-0ec9-4c51-b6b8-4372b9dc4290 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.491540] env[62813]: DEBUG oslo_vmware.api [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for the task: (returnval){ [ 1430.491540] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5259f456-7e96-51a9-7dcc-b7e4b85a70e7" [ 1430.491540] env[62813]: _type = "Task" [ 1430.491540] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.500015] env[62813]: DEBUG oslo_vmware.api [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5259f456-7e96-51a9-7dcc-b7e4b85a70e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.004689] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.005065] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1431.005235] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.290383] env[62813]: DEBUG nova.compute.manager [req-957c526c-2f39-4100-955e-1b0864c78d75 req-5588ce10-8af0-452e-a54b-0c41f7793e31 service nova] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Received event network-changed-fb30160e-ee19-459c-bf4a-efa6ee135043 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1431.290383] env[62813]: DEBUG nova.compute.manager [req-957c526c-2f39-4100-955e-1b0864c78d75 req-5588ce10-8af0-452e-a54b-0c41f7793e31 service nova] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Refreshing instance network info cache due to event network-changed-fb30160e-ee19-459c-bf4a-efa6ee135043. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1431.290383] env[62813]: DEBUG oslo_concurrency.lockutils [req-957c526c-2f39-4100-955e-1b0864c78d75 req-5588ce10-8af0-452e-a54b-0c41f7793e31 service nova] Acquiring lock "refresh_cache-07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.290383] env[62813]: DEBUG oslo_concurrency.lockutils [req-957c526c-2f39-4100-955e-1b0864c78d75 req-5588ce10-8af0-452e-a54b-0c41f7793e31 service nova] Acquired lock "refresh_cache-07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.290383] env[62813]: DEBUG nova.network.neutron [req-957c526c-2f39-4100-955e-1b0864c78d75 req-5588ce10-8af0-452e-a54b-0c41f7793e31 service nova] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Refreshing network info cache for port fb30160e-ee19-459c-bf4a-efa6ee135043 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1431.306804] env[62813]: DEBUG oslo_concurrency.lockutils [None req-875059c6-358b-4d2d-abe2-a567a1aaa8cf tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.036529] env[62813]: DEBUG nova.network.neutron [req-957c526c-2f39-4100-955e-1b0864c78d75 req-5588ce10-8af0-452e-a54b-0c41f7793e31 service nova] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Updated VIF entry in instance network info cache for port fb30160e-ee19-459c-bf4a-efa6ee135043. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1432.036863] env[62813]: DEBUG nova.network.neutron [req-957c526c-2f39-4100-955e-1b0864c78d75 req-5588ce10-8af0-452e-a54b-0c41f7793e31 service nova] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Updating instance_info_cache with network_info: [{"id": "fb30160e-ee19-459c-bf4a-efa6ee135043", "address": "fa:16:3e:f9:ef:c1", "network": {"id": "0f261672-1246-4a37-8b67-a9d24d314571", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-158054522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d2731f99cdc4553bd301f33c4df1517", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb30160e-ee", "ovs_interfaceid": "fb30160e-ee19-459c-bf4a-efa6ee135043", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.048808] env[62813]: DEBUG oslo_concurrency.lockutils [req-957c526c-2f39-4100-955e-1b0864c78d75 req-5588ce10-8af0-452e-a54b-0c41f7793e31 service nova] Releasing lock "refresh_cache-07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.164682] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.164984] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Cleaning up deleted instances with incomplete migration {{(pid=62813) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 1433.959478] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Acquiring lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.959710] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.174432] env[62813]: DEBUG oslo_service.periodic_task [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1435.174432] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Cleaning up deleted instances {{(pid=62813) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 1435.185450] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] There are 0 instances to clean {{(pid=62813) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 1436.174836] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1436.174836] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1439.164361] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.164462] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.455713] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3eb29875-9c29-496b-8288-90fc8c64784d tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Acquiring lock "401a154d-ff81-4c5d-9860-eae30f7a2171" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.455999] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3eb29875-9c29-496b-8288-90fc8c64784d tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Lock "401a154d-ff81-4c5d-9860-eae30f7a2171" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.164415] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.164697] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1444.164774] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 
1444.187950] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1444.188138] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1444.188275] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1444.188404] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1444.188529] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1444.188653] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1444.188770] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1444.188889] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1444.189015] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1444.189137] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1444.189256] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1444.189759] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.185810] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1447.163352] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1447.163729] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1447.163729] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1447.174963] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.175255] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.175444] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.175605] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1447.176767] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736e8e77-24e4-4af5-9a41-f97484c4fd7a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.185729] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6746ad-4b58-497e-b94d-df8b21b1ea3e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.200202] env[62813]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a9a875-8ec9-4209-be6a-a6cecd152f10 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.206851] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1b596b-2451-4b8a-a9d5-54c87d53c432 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.236403] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180777MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1447.236575] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.236744] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.411841] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.412027] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.412173] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance eec98a4d-34f4-4313-8f9c-2fe9f483959c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.412306] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d79298e6-bb55-4ba6-9a68-e460c8953c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.412471] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e6442505-b5d0-4736-a24a-41fccda6da6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.412597] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5655255a-1d03-4854-b8ad-d77643f9b9c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.412726] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.412852] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.412962] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d40089e3-67b3-452e-a0d1-18d5def1ff34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.413133] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.424659] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9402929-e845-416b-91e5-39d08ab90a2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1447.435587] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d5f63ddc-e786-471d-a871-2ef878bd2455 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1447.446963] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a296754a-5842-4ab5-9dd9-ccda09caa7d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1447.457728] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5a2ecdf4-b577-41a5-bb04-a0e4efac48f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1447.467852] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 176d5151-358a-4b90-9aff-064aa9648618 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1447.478129] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 2c94570a-7bb0-4719-9982-0e7710470db1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1447.488196] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1447.498965] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 401a154d-ff81-4c5d-9860-eae30f7a2171 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1447.498965] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1447.499162] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1447.515477] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing inventories for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1447.530237] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Updating ProviderTree inventory for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1447.531029] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Updating inventory in ProviderTree for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1447.542302] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing aggregate associations for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2, aggregates: None {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1447.561884] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing trait associations for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1447.765365] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4b58f8-0530-4f4c-9e64-91182002c38d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.772849] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e2d7c452-25fc-4ba4-a8ea-d196e8dfbfd0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.801682] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458c6b3d-f9f1-4a75-a6db-99c304cef705 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.808798] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1346e73e-cee9-466b-a436-7bc75853d2bc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.822490] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1447.830991] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1447.847710] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1447.847909] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.611s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.164272] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.332210] env[62813]: WARNING oslo_vmware.rw_handles [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1474.332210] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1474.332210] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1474.332210] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1474.332210] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1474.332210] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1474.332210] env[62813]: ERROR oslo_vmware.rw_handles 
File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1474.332210] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1474.332210] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1474.332210] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1474.332210] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1474.332210] env[62813]: ERROR oslo_vmware.rw_handles [ 1474.332901] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/8e3d0cb9-24cb-4949-b903-55044b3a3387/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1474.334771] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1474.335050] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Copying Virtual Disk [datastore2] vmware_temp/8e3d0cb9-24cb-4949-b903-55044b3a3387/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/8e3d0cb9-24cb-4949-b903-55044b3a3387/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1474.335690] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9deb9fb2-f304-42a1-8e50-4e2661ba14ea {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.344298] env[62813]: DEBUG oslo_vmware.api [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Waiting for the task: (returnval){ [ 1474.344298] env[62813]: value = "task-4267706" [ 1474.344298] env[62813]: _type = "Task" [ 1474.344298] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.353316] env[62813]: DEBUG oslo_vmware.api [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Task: {'id': task-4267706, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.855872] env[62813]: DEBUG oslo_vmware.exceptions [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1474.856191] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.856777] env[62813]: ERROR nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1474.856777] env[62813]: Faults: ['InvalidArgument'] [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Traceback (most recent call last): [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] yield resources [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] self.driver.spawn(context, instance, image_meta, [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] self._fetch_image_if_missing(context, vi) [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] image_cache(vi, tmp_image_ds_loc) [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] vm_util.copy_virtual_disk( [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] session._wait_for_task(vmdk_copy_task) [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] return self.wait_for_task(task_ref) [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] return evt.wait() [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] result = hub.switch() [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] return self.greenlet.switch() [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] self.f(*self.args, **self.kw) [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] raise exceptions.translate_fault(task_info.error) [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Faults: ['InvalidArgument'] [ 1474.856777] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] [ 1474.857840] env[62813]: INFO nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Terminating instance [ 1474.858811] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.859033] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1474.859283] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a65b15a-2ef8-4842-8beb-d1fb77077bc9 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.861579] env[62813]: DEBUG nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1474.861774] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1474.862554] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722827e7-b933-4424-8761-01c5f6d0d4a4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.870347] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1474.870599] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eee3e2ed-afd1-4306-9c92-8ba048843157 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.873055] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1474.873236] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1474.874252] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc9e1397-92a2-4e3f-b2ac-cceab9913de8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.879497] env[62813]: DEBUG oslo_vmware.api [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Waiting for the task: (returnval){ [ 1474.879497] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5214ad16-5fba-dba6-1026-eb3bb1de9d9a" [ 1474.879497] env[62813]: _type = "Task" [ 1474.879497] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.888881] env[62813]: DEBUG oslo_vmware.api [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5214ad16-5fba-dba6-1026-eb3bb1de9d9a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.951049] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1474.951227] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1474.951447] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Deleting the datastore file [datastore2] 1d8d7576-935b-4f51-8475-fe09aad4ea7c {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1474.951763] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1d1a68a-c79a-4d4a-8556-23ac961d2765 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.958930] env[62813]: DEBUG oslo_vmware.api [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Waiting for the task: (returnval){ [ 1474.958930] env[62813]: value = "task-4267708" [ 1474.958930] env[62813]: _type = "Task" [ 1474.958930] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.968066] env[62813]: DEBUG oslo_vmware.api [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Task: {'id': task-4267708, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.391106] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1475.391603] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Creating directory with path [datastore2] vmware_temp/17ba3b59-4fb4-490a-94c9-7c0f3e6fda1d/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1475.391744] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4721a342-692c-4a2c-9ac0-ede4b6a523ac {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.405911] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Created directory with path [datastore2] vmware_temp/17ba3b59-4fb4-490a-94c9-7c0f3e6fda1d/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1475.406127] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Fetch image to [datastore2] vmware_temp/17ba3b59-4fb4-490a-94c9-7c0f3e6fda1d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1475.406308] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/17ba3b59-4fb4-490a-94c9-7c0f3e6fda1d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1475.407138] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c0990f-617f-43cc-ae7e-bb9a2de442fa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.414552] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da0a910-5e3c-4d0c-ad79-00377ec12ded {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.424516] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cbe0109-9a74-42d2-8339-01750a8f44f4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.456791] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fd46e916-fa24-4cc8-99e7-e0bdd2dd0e6a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.468971] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d8edc818-a150-4f13-b9c8-43fb89bdf94a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.470865] env[62813]: DEBUG oslo_vmware.api [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Task: {'id': task-4267708, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079699} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.471130] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1475.471320] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1475.471496] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1475.471668] env[62813]: INFO nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Took 0.61 seconds to destroy the instance on the hypervisor. 
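
The traceback above records the usual VI SDK task flow: nova submits CopyVirtualDisk_Task, oslo.vmware polls the task state, and an 'error' state is translated into a VimFaultException that propagates up through wait_for_task into nova.compute.manager, which then tears the half-built instance down. The following is a minimal, self-contained sketch of that control flow only; TaskInfo, VimFaultException and poll_task here are simplified stand-ins, not the real nova or oslo.vmware classes (the real logic lives in oslo_vmware/api.py, as the traceback shows).

# Simplified illustration of the polling flow recorded in the traceback above.
# All names below are stand-ins for the vSphere SDK / oslo.vmware objects.
from dataclasses import dataclass, field


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, fault_list, message):
        super().__init__(f"{message} Faults: {fault_list}")
        self.fault_list = fault_list


@dataclass
class TaskInfo:
    """Stand-in for the TaskInfo object vCenter returns for a running task."""
    state: str                      # 'running', 'success' or 'error'
    error_message: str = ""
    faults: list = field(default_factory=list)


def poll_task(task_info: TaskInfo):
    """One polling iteration: raise when vCenter reports the task failed."""
    if task_info.state == "error":
        # oslo.vmware's translate_fault() performs this mapping for real faults.
        raise VimFaultException(task_info.faults, task_info.error_message)
    return task_info.state


if __name__ == "__main__":
    failed = TaskInfo(state="error",
                      error_message="A specified parameter was not correct: fileType",
                      faults=["InvalidArgument"])
    try:
        poll_task(failed)
    except VimFaultException as exc:
        # This is the exception nova.compute.manager logs before destroying
        # the instance, as seen in the entries above.
        print(exc)
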
[ 1475.473917] env[62813]: DEBUG nova.compute.claims [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1475.474099] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.474315] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.498486] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1475.559285] env[62813]: DEBUG oslo_vmware.rw_handles [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/17ba3b59-4fb4-490a-94c9-7c0f3e6fda1d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1475.621552] env[62813]: DEBUG oslo_vmware.rw_handles [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1475.621747] env[62813]: DEBUG oslo_vmware.rw_handles [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/17ba3b59-4fb4-490a-94c9-7c0f3e6fda1d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1475.819468] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b2ae17-2dcf-4d69-a5a4-0fa6b8230940 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.828012] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f33cd9f-b79a-4019-9baf-a714313fa01c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.859371] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3ce0d3-2ec4-4ed3-9d0a-c76996437c0e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.867365] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61154958-d0ba-4ca3-87bd-eba23e568eee {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.880873] env[62813]: DEBUG nova.compute.provider_tree [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1475.892377] env[62813]: DEBUG nova.scheduler.client.report [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1475.909208] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.435s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.909726] env[62813]: ERROR nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1475.909726] env[62813]: Faults: ['InvalidArgument'] [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Traceback (most recent call last): [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1475.909726] env[62813]: ERROR 
nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] self.driver.spawn(context, instance, image_meta, [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] self._fetch_image_if_missing(context, vi) [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] image_cache(vi, tmp_image_ds_loc) [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] vm_util.copy_virtual_disk( [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] session._wait_for_task(vmdk_copy_task) [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] return self.wait_for_task(task_ref) [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] return evt.wait() [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] result = hub.switch() [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] return self.greenlet.switch() [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] self.f(*self.args, **self.kw) [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] raise exceptions.translate_fault(task_info.error) [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Faults: ['InvalidArgument'] [ 1475.909726] env[62813]: ERROR nova.compute.manager [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] [ 1475.910635] env[62813]: DEBUG nova.compute.utils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1475.912066] env[62813]: DEBUG nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Build of instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c was re-scheduled: A specified parameter was not correct: fileType [ 1475.912066] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1475.912469] env[62813]: DEBUG nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1475.912645] env[62813]: DEBUG nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1475.912818] env[62813]: DEBUG nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1475.912982] env[62813]: DEBUG nova.network.neutron [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1476.348306] env[62813]: DEBUG nova.network.neutron [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.360950] env[62813]: INFO nova.compute.manager [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Took 0.45 seconds to deallocate network for instance. [ 1476.475458] env[62813]: INFO nova.scheduler.client.report [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Deleted allocations for instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c [ 1476.500168] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d704879a-e6fa-4ee8-a913-042e224d9274 tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 649.448s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.501486] env[62813]: DEBUG oslo_concurrency.lockutils [None req-10962ca0-40fc-4346-bad1-4825c0e1567a tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 452.641s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.501717] env[62813]: DEBUG oslo_concurrency.lockutils [None req-10962ca0-40fc-4346-bad1-4825c0e1567a tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Acquiring lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.501928] env[62813]: DEBUG oslo_concurrency.lockutils [None req-10962ca0-40fc-4346-bad1-4825c0e1567a tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.502111] env[62813]: DEBUG oslo_concurrency.lockutils [None req-10962ca0-40fc-4346-bad1-4825c0e1567a tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.504462] env[62813]: INFO nova.compute.manager [None req-10962ca0-40fc-4346-bad1-4825c0e1567a tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Terminating instance [ 1476.506433] env[62813]: DEBUG nova.compute.manager [None req-10962ca0-40fc-4346-bad1-4825c0e1567a tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1476.506674] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-10962ca0-40fc-4346-bad1-4825c0e1567a tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1476.507190] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46678a60-5601-4a3c-8212-a7684b84ad8f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.512446] env[62813]: DEBUG nova.compute.manager [None req-b2f93154-c0af-48b7-89ca-1da59a7f9f83 tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] [instance: b06482cf-8823-41af-a940-2dfb5d72e70e] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1476.519199] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342f8197-2780-4f66-8353-bf8ed709307c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.538481] env[62813]: DEBUG nova.compute.manager [None req-b2f93154-c0af-48b7-89ca-1da59a7f9f83 tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] [instance: b06482cf-8823-41af-a940-2dfb5d72e70e] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1476.550105] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-10962ca0-40fc-4346-bad1-4825c0e1567a tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1d8d7576-935b-4f51-8475-fe09aad4ea7c could not be found. 
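
The lockutils entries just above carry the wait/hold durations that show where the build spent its time, for example the build lock on 1d8d7576-935b-4f51-8475-fe09aad4ea7c held for 649.448s and the terminate lock waited on for 452.641s. Below is a small, hypothetical helper for pulling those durations out of lines in this exact format; the regexes simply match the log text shown here and are not part of any OpenStack tooling.

# Illustrative only: extract lock wait/hold timings from lockutils log lines
# in the format visible above.
import re

ACQUIRED = re.compile(
    r'Lock "(?P<lock>[^"]+)" acquired by "(?P<target>[^"]+)" :: '
    r'waited (?P<secs>[\d.]+)s')
RELEASED = re.compile(
    r'Lock "(?P<lock>[^"]+)" "released" by "(?P<target>[^"]+)" :: '
    r'held (?P<secs>[\d.]+)s')


def lock_timings(lines):
    """Yield (lock_name, event, seconds) for every matching lockutils line."""
    for line in lines:
        for event, pattern in (("waited", ACQUIRED), ("held", RELEASED)):
            m = pattern.search(line)
            if m:
                yield m.group("lock"), event, float(m.group("secs"))


sample = [
    'Lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c" "released" by '
    '"nova.compute.manager..." :: held 649.448s',
]
print(list(lock_timings(sample)))
# [('1d8d7576-935b-4f51-8475-fe09aad4ea7c', 'held', 649.448)]
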
[ 1476.550340] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-10962ca0-40fc-4346-bad1-4825c0e1567a tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1476.550525] env[62813]: INFO nova.compute.manager [None req-10962ca0-40fc-4346-bad1-4825c0e1567a tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1476.550788] env[62813]: DEBUG oslo.service.loopingcall [None req-10962ca0-40fc-4346-bad1-4825c0e1567a tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.551049] env[62813]: DEBUG nova.compute.manager [-] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1476.551155] env[62813]: DEBUG nova.network.neutron [-] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1476.570495] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b2f93154-c0af-48b7-89ca-1da59a7f9f83 tempest-ImagesTestJSON-1157233401 tempest-ImagesTestJSON-1157233401-project-member] Lock "b06482cf-8823-41af-a940-2dfb5d72e70e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.758s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.581268] env[62813]: DEBUG nova.network.neutron [-] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.583895] env[62813]: DEBUG nova.compute.manager [None req-4faa9eba-b19b-4e54-9869-87a2b15bb28f tempest-ServersListShow296Test-1233843078 tempest-ServersListShow296Test-1233843078-project-member] [instance: 8b7930d8-8117-4b3d-8218-39bff602a4b4] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1476.594217] env[62813]: INFO nova.compute.manager [-] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] Took 0.04 seconds to deallocate network for instance. [ 1476.609153] env[62813]: DEBUG nova.compute.manager [None req-4faa9eba-b19b-4e54-9869-87a2b15bb28f tempest-ServersListShow296Test-1233843078 tempest-ServersListShow296Test-1233843078-project-member] [instance: 8b7930d8-8117-4b3d-8218-39bff602a4b4] Instance disappeared before build. 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1476.630177] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4faa9eba-b19b-4e54-9869-87a2b15bb28f tempest-ServersListShow296Test-1233843078 tempest-ServersListShow296Test-1233843078-project-member] Lock "8b7930d8-8117-4b3d-8218-39bff602a4b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.421s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.644787] env[62813]: DEBUG nova.compute.manager [None req-3877d6ba-930c-405b-b741-88971af2c20c tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] [instance: 467d9133-7fe6-44e7-9ffb-f0edacf3be81] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1476.670729] env[62813]: DEBUG nova.compute.manager [None req-3877d6ba-930c-405b-b741-88971af2c20c tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] [instance: 467d9133-7fe6-44e7-9ffb-f0edacf3be81] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1476.698199] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3877d6ba-930c-405b-b741-88971af2c20c tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Lock "467d9133-7fe6-44e7-9ffb-f0edacf3be81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.259s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.711229] env[62813]: DEBUG nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1476.726927] env[62813]: DEBUG oslo_concurrency.lockutils [None req-10962ca0-40fc-4346-bad1-4825c0e1567a tempest-ServersTestManualDisk-367556346 tempest-ServersTestManualDisk-367556346-project-member] Lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.225s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.727912] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 328.031s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.728127] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1d8d7576-935b-4f51-8475-fe09aad4ea7c] During sync_power_state the instance has a pending task (deleting). Skip. 
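
The inventory payload repeated throughout these entries is what bounds every claim the resource tracker accepts. Placement derives usable capacity roughly as (total - reserved) * allocation_ratio per resource class, with max_unit capping any single allocation; the sketch below reruns that arithmetic on the exact figures from this log. effective_capacity() is an illustrative helper, not placement code.

# Illustrative only: effective schedulable capacity from the inventory dict
# logged above for provider 49efdf20-78bc-435f-a902-9cc99ed395f2.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'max_unit': 65530},
    'DISK_GB':   {'total': 405,    'reserved': 0,   'allocation_ratio': 1.0,
                  'max_unit': 222},
}


def effective_capacity(inv):
    """Schedulable amount per resource class, before per-request max_unit caps."""
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}


print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 405.0}
# A single m1.nano claim ({'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}, as in the
# entries above) must also fit under each class's max_unit (16 / 65530 / 222).
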
[ 1476.728311] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "1d8d7576-935b-4f51-8475-fe09aad4ea7c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.774674] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.774674] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.776276] env[62813]: INFO nova.compute.claims [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1477.048881] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0b21b8-2937-435e-8b2f-609dc0e6655e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.057213] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15494b54-0460-49d1-840a-e7a919da9082 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.088215] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba6110a-95a1-433c-8901-34ea6e6ada33 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.096440] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f50b87-87f2-41e2-87e6-dc0869ea60c6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.111329] env[62813]: DEBUG nova.compute.provider_tree [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1477.123061] env[62813]: DEBUG nova.scheduler.client.report [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1477.138586] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.364s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.139794] env[62813]: DEBUG nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1477.178010] env[62813]: DEBUG nova.compute.utils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1477.179352] env[62813]: DEBUG nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1477.179541] env[62813]: DEBUG nova.network.neutron [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1477.191037] env[62813]: DEBUG nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1477.257843] env[62813]: DEBUG nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1477.285377] env[62813]: DEBUG nova.policy [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '753e0ee081674f48987c38a86a803d65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e864df6d50549cabd12cc2a44e28746', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1477.293657] env[62813]: DEBUG nova.virt.hardware [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1477.293901] env[62813]: DEBUG nova.virt.hardware [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1477.294074] env[62813]: DEBUG nova.virt.hardware [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1477.294258] env[62813]: DEBUG nova.virt.hardware [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1477.294398] env[62813]: DEBUG nova.virt.hardware [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1477.294542] env[62813]: DEBUG nova.virt.hardware [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1477.294750] env[62813]: DEBUG nova.virt.hardware [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1477.294906] env[62813]: DEBUG nova.virt.hardware [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1477.295158] env[62813]: DEBUG nova.virt.hardware [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1477.295253] env[62813]: DEBUG nova.virt.hardware [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1477.295482] env[62813]: DEBUG nova.virt.hardware [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1477.296700] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26318ab4-3318-4e6c-9559-c0bae4d9ad38 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.308871] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70c7bcd-6785-4a81-8bc4-765ca555c658 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.682647] env[62813]: DEBUG nova.network.neutron [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Successfully created port: 85b93eb1-0513-497d-98b1-84b0b94fe405 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1478.417617] env[62813]: DEBUG nova.compute.manager [req-362971de-f7d9-479c-bdd4-b0e0fc87079e req-84ea37c9-022c-42a8-8053-a12a5f8ef05c service nova] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Received event network-vif-plugged-85b93eb1-0513-497d-98b1-84b0b94fe405 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1478.417811] env[62813]: DEBUG oslo_concurrency.lockutils [req-362971de-f7d9-479c-bdd4-b0e0fc87079e req-84ea37c9-022c-42a8-8053-a12a5f8ef05c service nova] Acquiring lock "c9402929-e845-416b-91e5-39d08ab90a2e-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.418043] env[62813]: DEBUG oslo_concurrency.lockutils [req-362971de-f7d9-479c-bdd4-b0e0fc87079e req-84ea37c9-022c-42a8-8053-a12a5f8ef05c service nova] Lock "c9402929-e845-416b-91e5-39d08ab90a2e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.418220] env[62813]: DEBUG oslo_concurrency.lockutils [req-362971de-f7d9-479c-bdd4-b0e0fc87079e req-84ea37c9-022c-42a8-8053-a12a5f8ef05c service nova] Lock "c9402929-e845-416b-91e5-39d08ab90a2e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.418389] env[62813]: DEBUG nova.compute.manager [req-362971de-f7d9-479c-bdd4-b0e0fc87079e req-84ea37c9-022c-42a8-8053-a12a5f8ef05c service nova] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] No waiting events found dispatching network-vif-plugged-85b93eb1-0513-497d-98b1-84b0b94fe405 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1478.418552] env[62813]: WARNING nova.compute.manager [req-362971de-f7d9-479c-bdd4-b0e0fc87079e req-84ea37c9-022c-42a8-8053-a12a5f8ef05c service nova] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Received unexpected event network-vif-plugged-85b93eb1-0513-497d-98b1-84b0b94fe405 for instance with vm_state building and task_state spawning. [ 1478.419495] env[62813]: DEBUG nova.network.neutron [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Successfully updated port: 85b93eb1-0513-497d-98b1-84b0b94fe405 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1478.431264] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Acquiring lock "refresh_cache-c9402929-e845-416b-91e5-39d08ab90a2e" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.431410] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Acquired lock "refresh_cache-c9402929-e845-416b-91e5-39d08ab90a2e" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.431562] env[62813]: DEBUG nova.network.neutron [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1478.483016] env[62813]: DEBUG nova.network.neutron [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] 
[instance: c9402929-e845-416b-91e5-39d08ab90a2e] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1478.755624] env[62813]: DEBUG nova.network.neutron [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Updating instance_info_cache with network_info: [{"id": "85b93eb1-0513-497d-98b1-84b0b94fe405", "address": "fa:16:3e:e7:7f:ac", "network": {"id": "877546ca-fcf7-4155-96f1-d72104e030a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1952734465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e864df6d50549cabd12cc2a44e28746", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85b93eb1-05", "ovs_interfaceid": "85b93eb1-0513-497d-98b1-84b0b94fe405", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.769931] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Releasing lock "refresh_cache-c9402929-e845-416b-91e5-39d08ab90a2e" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.770279] env[62813]: DEBUG nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Instance network_info: |[{"id": "85b93eb1-0513-497d-98b1-84b0b94fe405", "address": "fa:16:3e:e7:7f:ac", "network": {"id": "877546ca-fcf7-4155-96f1-d72104e030a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1952734465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e864df6d50549cabd12cc2a44e28746", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85b93eb1-05", "ovs_interfaceid": "85b93eb1-0513-497d-98b1-84b0b94fe405", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1478.770714] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:7f:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85b93eb1-0513-497d-98b1-84b0b94fe405', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1478.778472] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Creating folder: Project (4e864df6d50549cabd12cc2a44e28746). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1478.779095] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7bfe4d79-f2c2-4fd0-b6ec-e561ed7ea68d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.791476] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Created folder: Project (4e864df6d50549cabd12cc2a44e28746) in parent group-v840812. [ 1478.791697] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Creating folder: Instances. Parent ref: group-v840896. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1478.791991] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf4404e0-4e22-4186-bac5-af93a5afa058 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.802407] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Created folder: Instances in parent group-v840896. [ 1478.802683] env[62813]: DEBUG oslo.service.loopingcall [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.802869] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1478.803096] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b124a80-cd5f-47d0-bd48-a3f675d5a9de {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.823972] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1478.823972] env[62813]: value = "task-4267711" [ 1478.823972] env[62813]: _type = "Task" [ 1478.823972] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.833133] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267711, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.335854] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267711, 'name': CreateVM_Task, 'duration_secs': 0.313928} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.336029] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1479.336734] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.336908] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.337248] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1479.337505] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a6814b9-cab0-4059-9340-e05ff23ab8ad {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.342952] env[62813]: DEBUG oslo_vmware.api [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Waiting for the task: (returnval){ [ 1479.342952] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]521a60bf-05dd-a2a9-8742-f50787ce8c37" [ 1479.342952] 
env[62813]: _type = "Task" [ 1479.342952] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.351710] env[62813]: DEBUG oslo_vmware.api [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]521a60bf-05dd-a2a9-8742-f50787ce8c37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.853685] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.854052] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1479.854176] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.452724] env[62813]: DEBUG nova.compute.manager [req-3d91a6c5-051d-48eb-a69f-794fcecf9828 req-c9908e11-f90f-4786-a196-c0d7f636399e service nova] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Received event network-changed-85b93eb1-0513-497d-98b1-84b0b94fe405 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1480.452943] env[62813]: DEBUG nova.compute.manager [req-3d91a6c5-051d-48eb-a69f-794fcecf9828 req-c9908e11-f90f-4786-a196-c0d7f636399e service nova] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Refreshing instance network info cache due to event network-changed-85b93eb1-0513-497d-98b1-84b0b94fe405. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1480.453182] env[62813]: DEBUG oslo_concurrency.lockutils [req-3d91a6c5-051d-48eb-a69f-794fcecf9828 req-c9908e11-f90f-4786-a196-c0d7f636399e service nova] Acquiring lock "refresh_cache-c9402929-e845-416b-91e5-39d08ab90a2e" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.453330] env[62813]: DEBUG oslo_concurrency.lockutils [req-3d91a6c5-051d-48eb-a69f-794fcecf9828 req-c9908e11-f90f-4786-a196-c0d7f636399e service nova] Acquired lock "refresh_cache-c9402929-e845-416b-91e5-39d08ab90a2e" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.453548] env[62813]: DEBUG nova.network.neutron [req-3d91a6c5-051d-48eb-a69f-794fcecf9828 req-c9908e11-f90f-4786-a196-c0d7f636399e service nova] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Refreshing network info cache for port 85b93eb1-0513-497d-98b1-84b0b94fe405 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1481.011663] env[62813]: DEBUG nova.network.neutron [req-3d91a6c5-051d-48eb-a69f-794fcecf9828 req-c9908e11-f90f-4786-a196-c0d7f636399e service nova] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Updated VIF entry in instance network info cache for port 85b93eb1-0513-497d-98b1-84b0b94fe405. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1481.012296] env[62813]: DEBUG nova.network.neutron [req-3d91a6c5-051d-48eb-a69f-794fcecf9828 req-c9908e11-f90f-4786-a196-c0d7f636399e service nova] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Updating instance_info_cache with network_info: [{"id": "85b93eb1-0513-497d-98b1-84b0b94fe405", "address": "fa:16:3e:e7:7f:ac", "network": {"id": "877546ca-fcf7-4155-96f1-d72104e030a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1952734465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e864df6d50549cabd12cc2a44e28746", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85b93eb1-05", "ovs_interfaceid": "85b93eb1-0513-497d-98b1-84b0b94fe405", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.025969] env[62813]: DEBUG oslo_concurrency.lockutils [req-3d91a6c5-051d-48eb-a69f-794fcecf9828 req-c9908e11-f90f-4786-a196-c0d7f636399e service nova] Releasing lock "refresh_cache-c9402929-e845-416b-91e5-39d08ab90a2e" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.186760] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62353100-903b-4eb6-913e-d3fe7487ca83 tempest-ServersNegativeTestMultiTenantJSON-314877174 
tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Acquiring lock "c9402929-e845-416b-91e5-39d08ab90a2e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.172085] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1496.172452] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1496.546663] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Acquiring lock "0d095679-87c7-46f6-8869-42b0f22127e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.546663] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Lock "0d095679-87c7-46f6-8869-42b0f22127e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.164779] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.163884] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1504.164615] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1505.164700] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1505.165171] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1505.165171] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1505.187645] env[62813]: DEBUG nova.compute.manager [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1505.187831] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1505.187943] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1505.188089] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1505.188243] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1505.188374] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1505.188499] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1505.188622] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1505.188747] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1505.188871] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1505.188996] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1506.184068] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.184434] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1507.163807] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1508.163983] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.163435] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.176357] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.176637] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.176786] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.176951] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1509.178206] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a55af1-e4d6-4838-bc47-a8839105df7d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.187613] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9a5ddf-fdba-4f21-b3e3-f0493fe5ada8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.202663] env[62813]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f26bdc-05fe-43bc-9a49-597496a21f49 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.209862] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba2838a-40ea-4386-ba50-62f33ea9adcc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.238776] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180748MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1509.239056] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.239150] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.325624] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.325846] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance eec98a4d-34f4-4313-8f9c-2fe9f483959c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.326027] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d79298e6-bb55-4ba6-9a68-e460c8953c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.326169] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e6442505-b5d0-4736-a24a-41fccda6da6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.326294] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5655255a-1d03-4854-b8ad-d77643f9b9c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.326415] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.326536] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.326655] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d40089e3-67b3-452e-a0d1-18d5def1ff34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.326774] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.326892] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9402929-e845-416b-91e5-39d08ab90a2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.339507] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d5f63ddc-e786-471d-a871-2ef878bd2455 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.350819] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a296754a-5842-4ab5-9dd9-ccda09caa7d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.364397] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5a2ecdf4-b577-41a5-bb04-a0e4efac48f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.377068] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 176d5151-358a-4b90-9aff-064aa9648618 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.388992] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 2c94570a-7bb0-4719-9982-0e7710470db1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.404483] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.415488] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 401a154d-ff81-4c5d-9860-eae30f7a2171 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.427633] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0d095679-87c7-46f6-8869-42b0f22127e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.427889] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1509.428073] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1509.649078] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2eb9070-8fb2-4821-9712-efdca91accdd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.657439] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74ca521-a24a-416e-a98c-cb741fe9e599 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.687119] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c1ab72-84e6-433f-9e0e-4836e991d4ae {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.695028] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0295d9f9-32f6-40cb-bd0e-c5d4f3f98e6a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.709275] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1509.716652] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1509.734936] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1509.735150] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.496s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.816184] env[62813]: WARNING oslo_vmware.rw_handles [None 
req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1521.816184] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1521.816184] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1521.816184] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1521.816184] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1521.816184] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1521.816184] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1521.816184] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1521.816184] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1521.816184] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1521.816184] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1521.816184] env[62813]: ERROR oslo_vmware.rw_handles [ 1521.816819] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/17ba3b59-4fb4-490a-94c9-7c0f3e6fda1d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1521.818926] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1521.819206] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Copying Virtual Disk [datastore2] vmware_temp/17ba3b59-4fb4-490a-94c9-7c0f3e6fda1d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/17ba3b59-4fb4-490a-94c9-7c0f3e6fda1d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1521.819524] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f318bf2-6d82-4839-bfb0-a785825a20ae {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.827470] env[62813]: DEBUG oslo_vmware.api [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Waiting for the task: (returnval){ [ 1521.827470] env[62813]: value = "task-4267712" [ 1521.827470] env[62813]: _type = "Task" [ 1521.827470] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.835708] env[62813]: DEBUG oslo_vmware.api [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Task: {'id': task-4267712, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.338966] env[62813]: DEBUG oslo_vmware.exceptions [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1522.338966] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.339324] env[62813]: ERROR nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1522.339324] env[62813]: Faults: ['InvalidArgument'] [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Traceback (most recent call last): [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] yield resources [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] self.driver.spawn(context, instance, image_meta, [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] self._fetch_image_if_missing(context, vi) [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] image_cache(vi, tmp_image_ds_loc) [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: 
a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] vm_util.copy_virtual_disk( [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] session._wait_for_task(vmdk_copy_task) [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] return self.wait_for_task(task_ref) [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] return evt.wait() [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] result = hub.switch() [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] return self.greenlet.switch() [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] self.f(*self.args, **self.kw) [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] raise exceptions.translate_fault(task_info.error) [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Faults: ['InvalidArgument'] [ 1522.339324] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] [ 1522.340312] env[62813]: INFO nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Terminating instance [ 1522.341362] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.341570] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1522.341816] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e619e522-8c97-4e3d-9d28-5ddc8f8db851 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.344308] env[62813]: DEBUG nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1522.344505] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1522.345238] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e51271f-5917-4a92-8a56-cc00249bb6e1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.352189] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1522.352414] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39b05bb3-d7e0-453d-b7f7-130e263b8a6a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.354603] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1522.354775] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1522.355728] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f4e2629-b696-4a44-948c-dc845121ce19 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.361018] env[62813]: DEBUG oslo_vmware.api [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Waiting for the task: (returnval){ [ 1522.361018] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]529f6c3b-a110-42ba-7765-d6f3e7bd8686" [ 1522.361018] env[62813]: _type = "Task" [ 1522.361018] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.370596] env[62813]: DEBUG oslo_vmware.api [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]529f6c3b-a110-42ba-7765-d6f3e7bd8686, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.431898] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1522.432134] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1522.432323] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Deleting the datastore file [datastore2] a3cd73ab-b0e7-43f8-bba7-8539f89a1787 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1522.432594] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99047647-43ae-46d6-858f-e7fb2c05071a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.439629] env[62813]: DEBUG oslo_vmware.api [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Waiting for the task: (returnval){ [ 1522.439629] env[62813]: value = "task-4267714" [ 1522.439629] env[62813]: _type = "Task" [ 1522.439629] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.448024] env[62813]: DEBUG oslo_vmware.api [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Task: {'id': task-4267714, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.872063] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1522.872063] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Creating directory with path [datastore2] vmware_temp/9a432e56-617d-48e3-8b73-607954f9bf55/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1522.872063] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a48cc04-4c0f-43b7-b4c9-3eb355c98f83 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.883801] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Created directory with path [datastore2] vmware_temp/9a432e56-617d-48e3-8b73-607954f9bf55/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1522.883999] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Fetch image to [datastore2] vmware_temp/9a432e56-617d-48e3-8b73-607954f9bf55/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1522.884179] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/9a432e56-617d-48e3-8b73-607954f9bf55/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1522.884917] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976ab2fe-3585-4a2e-8651-97b2b2cae9c3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.891805] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6192509-048b-43c8-9104-f93a56e0a3ef {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.901293] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01558a1e-1b93-40ed-b682-7f9b191f4bbe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.934096] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5e943a-8b5f-4f7c-9f52-fdb9e065c4b4 
{{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.943915] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5c21f40d-18b3-4c84-9dbb-6c26fe8b9689 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.950884] env[62813]: DEBUG oslo_vmware.api [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Task: {'id': task-4267714, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068693} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.951205] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1522.951407] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1522.951581] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1522.951754] env[62813]: INFO nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1522.954171] env[62813]: DEBUG nova.compute.claims [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1522.954426] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.954560] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.966100] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1523.150860] env[62813]: DEBUG oslo_vmware.rw_handles [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9a432e56-617d-48e3-8b73-607954f9bf55/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1523.210072] env[62813]: DEBUG oslo_vmware.rw_handles [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1523.210284] env[62813]: DEBUG oslo_vmware.rw_handles [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9a432e56-617d-48e3-8b73-607954f9bf55/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1523.299926] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278ba825-9758-4fcf-b884-430642c1dd2b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.308309] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a81913d-aae2-4358-9e1c-e2ab1792281a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.338137] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7236fe6d-f6c4-48f4-9790-c7f37cd0ff92 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.346075] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3eae58-d185-431b-95dc-48c6366e3fc0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.360277] env[62813]: DEBUG nova.compute.provider_tree [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1523.369916] env[62813]: DEBUG nova.scheduler.client.report [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1523.384771] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.430s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.385325] env[62813]: ERROR nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1523.385325] env[62813]: Faults: ['InvalidArgument'] [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Traceback (most recent call last): [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1523.385325] env[62813]: 
ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] self.driver.spawn(context, instance, image_meta, [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] self._fetch_image_if_missing(context, vi) [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] image_cache(vi, tmp_image_ds_loc) [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] vm_util.copy_virtual_disk( [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] session._wait_for_task(vmdk_copy_task) [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] return self.wait_for_task(task_ref) [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] return evt.wait() [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] result = hub.switch() [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] return self.greenlet.switch() [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] self.f(*self.args, **self.kw) [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] raise exceptions.translate_fault(task_info.error) [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Faults: ['InvalidArgument'] [ 1523.385325] env[62813]: ERROR nova.compute.manager [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] [ 1523.386281] env[62813]: DEBUG nova.compute.utils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1523.387696] env[62813]: DEBUG nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Build of instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 was re-scheduled: A specified parameter was not correct: fileType [ 1523.387696] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1523.388138] env[62813]: DEBUG nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1523.388336] env[62813]: DEBUG nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1523.388512] env[62813]: DEBUG nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1523.388684] env[62813]: DEBUG nova.network.neutron [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1523.730663] env[62813]: DEBUG nova.network.neutron [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.746753] env[62813]: INFO nova.compute.manager [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Took 0.36 seconds to deallocate network for instance. [ 1523.850125] env[62813]: INFO nova.scheduler.client.report [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Deleted allocations for instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 [ 1523.872761] env[62813]: DEBUG oslo_concurrency.lockutils [None req-536c27f7-329c-4271-b2f2-a10a13d94a50 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 689.048s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.874035] env[62813]: DEBUG oslo_concurrency.lockutils [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 491.814s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.874035] env[62813]: DEBUG oslo_concurrency.lockutils [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquiring lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.874200] env[62813]: DEBUG oslo_concurrency.lockutils [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.874330] env[62813]: DEBUG oslo_concurrency.lockutils [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.876972] env[62813]: INFO nova.compute.manager [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Terminating instance [ 1523.878571] env[62813]: DEBUG oslo_concurrency.lockutils [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquiring lock "refresh_cache-a3cd73ab-b0e7-43f8-bba7-8539f89a1787" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1523.878734] env[62813]: DEBUG oslo_concurrency.lockutils [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Acquired lock "refresh_cache-a3cd73ab-b0e7-43f8-bba7-8539f89a1787" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.878900] env[62813]: DEBUG nova.network.neutron [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1523.892333] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1523.915133] env[62813]: DEBUG nova.network.neutron [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1523.939901] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.940167] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.941637] env[62813]: INFO nova.compute.claims [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1524.058494] env[62813]: DEBUG nova.network.neutron [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.068217] env[62813]: DEBUG oslo_concurrency.lockutils [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Releasing lock "refresh_cache-a3cd73ab-b0e7-43f8-bba7-8539f89a1787" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.068217] env[62813]: DEBUG nova.compute.manager [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Start destroying the instance on the hypervisor. 
{{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1524.068451] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1524.068916] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d369095-a23d-4098-96ba-9dfe73726962 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.081904] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711eb8e5-fefa-47bb-87f0-e8c187e8e5f5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.115819] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a3cd73ab-b0e7-43f8-bba7-8539f89a1787 could not be found. [ 1524.116203] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1524.116443] env[62813]: INFO nova.compute.manager [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1524.116705] env[62813]: DEBUG oslo.service.loopingcall [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1524.119349] env[62813]: DEBUG nova.compute.manager [-] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1524.119604] env[62813]: DEBUG nova.network.neutron [-] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1524.142876] env[62813]: DEBUG nova.network.neutron [-] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1524.150704] env[62813]: DEBUG nova.network.neutron [-] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.161268] env[62813]: INFO nova.compute.manager [-] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] Took 0.04 seconds to deallocate network for instance. 
[ 1524.261938] env[62813]: DEBUG oslo_concurrency.lockutils [None req-313e84c9-87e9-4179-aab3-ad3a2afaf9d7 tempest-AttachInterfacesTestJSON-29643015 tempest-AttachInterfacesTestJSON-29643015-project-member] Lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.388s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.262807] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 375.565s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.263027] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: a3cd73ab-b0e7-43f8-bba7-8539f89a1787] During sync_power_state the instance has a pending task (deleting). Skip. [ 1524.263248] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "a3cd73ab-b0e7-43f8-bba7-8539f89a1787" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.268934] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd68693-d948-413b-9e8b-c47ae10349ce {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.278114] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e716a925-a386-44d9-8f3b-14a9518ed1a9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.310248] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa61e9b1-440a-402d-83b8-084aea45bc62 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.318171] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5c2bed-34e8-4d1d-967b-bdd68d7717dc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.331903] env[62813]: DEBUG nova.compute.provider_tree [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1524.340592] env[62813]: DEBUG nova.scheduler.client.report [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1524.354437] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.414s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.354937] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1524.387195] env[62813]: DEBUG nova.compute.utils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1524.389446] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1524.389629] env[62813]: DEBUG nova.network.neutron [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1524.401785] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1524.472359] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1524.476353] env[62813]: DEBUG nova.policy [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28b1e91657764c1e9d650420f591fb18', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '123df1a06a68413d86e873e37c4591c7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1524.498037] env[62813]: DEBUG nova.virt.hardware [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1524.498329] env[62813]: DEBUG nova.virt.hardware [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1524.498516] env[62813]: DEBUG nova.virt.hardware [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1524.498706] env[62813]: DEBUG nova.virt.hardware [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1524.498858] env[62813]: DEBUG nova.virt.hardware [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1524.499016] env[62813]: DEBUG nova.virt.hardware [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1524.499239] env[62813]: DEBUG nova.virt.hardware [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1524.499404] env[62813]: DEBUG nova.virt.hardware [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1524.499575] env[62813]: DEBUG nova.virt.hardware [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1524.499742] env[62813]: DEBUG nova.virt.hardware [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1524.499916] env[62813]: DEBUG nova.virt.hardware [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1524.500810] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad623b7-e360-4116-8163-8353b3fc8778 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.509632] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e2278f-e9ae-48d8-9d2b-5e0ff348d6be {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.802600] env[62813]: DEBUG nova.network.neutron [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Successfully created port: 3779c202-bab1-4dfe-9f5c-af880abd7e97 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1525.736669] env[62813]: DEBUG nova.network.neutron [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Successfully updated port: 3779c202-bab1-4dfe-9f5c-af880abd7e97 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1525.755801] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquiring lock "refresh_cache-d5f63ddc-e786-471d-a871-2ef878bd2455" {{(pid=62813) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1525.756361] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquired lock "refresh_cache-d5f63ddc-e786-471d-a871-2ef878bd2455" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1525.756689] env[62813]: DEBUG nova.network.neutron [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1525.782098] env[62813]: DEBUG nova.compute.manager [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Received event network-vif-plugged-3779c202-bab1-4dfe-9f5c-af880abd7e97 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1525.782269] env[62813]: DEBUG oslo_concurrency.lockutils [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] Acquiring lock "d5f63ddc-e786-471d-a871-2ef878bd2455-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.782559] env[62813]: DEBUG oslo_concurrency.lockutils [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] Lock "d5f63ddc-e786-471d-a871-2ef878bd2455-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.782782] env[62813]: DEBUG oslo_concurrency.lockutils [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] Lock "d5f63ddc-e786-471d-a871-2ef878bd2455-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.782917] env[62813]: DEBUG nova.compute.manager [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] No waiting events found dispatching network-vif-plugged-3779c202-bab1-4dfe-9f5c-af880abd7e97 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1525.783096] env[62813]: WARNING nova.compute.manager [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Received unexpected event network-vif-plugged-3779c202-bab1-4dfe-9f5c-af880abd7e97 for instance with vm_state building and task_state spawning. 
[ 1525.783265] env[62813]: DEBUG nova.compute.manager [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Received event network-changed-3779c202-bab1-4dfe-9f5c-af880abd7e97 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1525.783422] env[62813]: DEBUG nova.compute.manager [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Refreshing instance network info cache due to event network-changed-3779c202-bab1-4dfe-9f5c-af880abd7e97. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1525.783615] env[62813]: DEBUG oslo_concurrency.lockutils [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] Acquiring lock "refresh_cache-d5f63ddc-e786-471d-a871-2ef878bd2455" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1525.832646] env[62813]: DEBUG nova.network.neutron [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1526.018154] env[62813]: DEBUG nova.network.neutron [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Updating instance_info_cache with network_info: [{"id": "3779c202-bab1-4dfe-9f5c-af880abd7e97", "address": "fa:16:3e:29:71:da", "network": {"id": "247c1272-4f20-4c41-908d-6bbb91efb933", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1316865178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "123df1a06a68413d86e873e37c4591c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f1b07b1-e4e5-4842-9090-07fb2c3e124b", "external-id": "nsx-vlan-transportzone-646", "segmentation_id": 646, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3779c202-ba", "ovs_interfaceid": "3779c202-bab1-4dfe-9f5c-af880abd7e97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.030394] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Releasing lock "refresh_cache-d5f63ddc-e786-471d-a871-2ef878bd2455" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.030737] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 
tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Instance network_info: |[{"id": "3779c202-bab1-4dfe-9f5c-af880abd7e97", "address": "fa:16:3e:29:71:da", "network": {"id": "247c1272-4f20-4c41-908d-6bbb91efb933", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1316865178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "123df1a06a68413d86e873e37c4591c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f1b07b1-e4e5-4842-9090-07fb2c3e124b", "external-id": "nsx-vlan-transportzone-646", "segmentation_id": 646, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3779c202-ba", "ovs_interfaceid": "3779c202-bab1-4dfe-9f5c-af880abd7e97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1526.031071] env[62813]: DEBUG oslo_concurrency.lockutils [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] Acquired lock "refresh_cache-d5f63ddc-e786-471d-a871-2ef878bd2455" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.031256] env[62813]: DEBUG nova.network.neutron [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Refreshing network info cache for port 3779c202-bab1-4dfe-9f5c-af880abd7e97 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1526.032345] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:71:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f1b07b1-e4e5-4842-9090-07fb2c3e124b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3779c202-bab1-4dfe-9f5c-af880abd7e97', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1526.040223] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Creating folder: Project (123df1a06a68413d86e873e37c4591c7). Parent ref: group-v840812. 
{{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1526.043182] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61277adc-c5ab-43e8-ad30-08bf15ff3c43 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.056470] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Created folder: Project (123df1a06a68413d86e873e37c4591c7) in parent group-v840812. [ 1526.057202] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Creating folder: Instances. Parent ref: group-v840899. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1526.057202] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1cfacded-53cf-4d5d-ba88-41cb3a2b0cdc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.066645] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Created folder: Instances in parent group-v840899. [ 1526.066884] env[62813]: DEBUG oslo.service.loopingcall [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1526.067133] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1526.067358] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5b75007-8190-4043-af50-036f2746fd87 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.089831] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1526.089831] env[62813]: value = "task-4267717" [ 1526.089831] env[62813]: _type = "Task" [ 1526.089831] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.098159] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267717, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.359755] env[62813]: DEBUG nova.network.neutron [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Updated VIF entry in instance network info cache for port 3779c202-bab1-4dfe-9f5c-af880abd7e97. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1526.360181] env[62813]: DEBUG nova.network.neutron [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Updating instance_info_cache with network_info: [{"id": "3779c202-bab1-4dfe-9f5c-af880abd7e97", "address": "fa:16:3e:29:71:da", "network": {"id": "247c1272-4f20-4c41-908d-6bbb91efb933", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1316865178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "123df1a06a68413d86e873e37c4591c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f1b07b1-e4e5-4842-9090-07fb2c3e124b", "external-id": "nsx-vlan-transportzone-646", "segmentation_id": 646, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3779c202-ba", "ovs_interfaceid": "3779c202-bab1-4dfe-9f5c-af880abd7e97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.371563] env[62813]: DEBUG oslo_concurrency.lockutils [req-dc0328c4-2ca3-4079-9ca1-c0cdb5e9f825 req-a27a9697-fcdc-4a53-84f5-820c91e0d4f2 service nova] Releasing lock "refresh_cache-d5f63ddc-e786-471d-a871-2ef878bd2455" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.600407] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267717, 'name': CreateVM_Task, 'duration_secs': 0.323323} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.600609] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1526.601349] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.601547] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.601910] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1526.602222] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd78b77c-c83f-4721-b856-7192efa287a1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.607361] env[62813]: DEBUG oslo_vmware.api [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Waiting for the task: (returnval){ [ 1526.607361] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5266de8f-6e2c-f8b4-1324-f911d688e043" [ 1526.607361] env[62813]: _type = "Task" [ 1526.607361] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.616147] env[62813]: DEBUG oslo_vmware.api [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5266de8f-6e2c-f8b4-1324-f911d688e043, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.119539] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1527.119918] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1527.120164] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.979797] env[62813]: DEBUG oslo_concurrency.lockutils [None req-472cff67-c904-47e3-a84f-9cf0bf01a46f tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquiring lock "d5f63ddc-e786-471d-a871-2ef878bd2455" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.707836] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquiring lock "f7777e07-72df-4af1-8f22-ccb71db0e06a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.707836] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Lock "f7777e07-72df-4af1-8f22-ccb71db0e06a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.849032] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4892e14c-74aa-487e-809f-6405e64ddb32 tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquiring lock "d43e416f-bdd1-49e1-aebd-838b319fc047" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.849222] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4892e14c-74aa-487e-809f-6405e64ddb32 tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Lock "d43e416f-bdd1-49e1-aebd-838b319fc047" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.735700] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1558.736033] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1561.165606] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.164317] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1565.163644] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1565.164052] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1565.164052] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1565.188251] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1565.188466] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1565.188670] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1565.188864] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1565.189013] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1565.189149] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1565.189272] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1565.189392] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1565.189540] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1565.189673] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1565.189796] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1566.163646] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.160705] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.164945] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1569.163937] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1569.176443] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.176793] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.176838] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.176988] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1569.178294] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0df6769-c989-499c-9a11-4e05f7d7079b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.187726] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e1df18-e39f-4738-be06-09f531f1fdad {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.202621] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25118fc0-c488-45df-8ab6-c4aa84560143 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.209923] env[62813]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172cf5cf-d6da-47e7-ad6b-7309ec7fc1d3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.240424] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180776MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1569.240587] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.240770] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.318634] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance eec98a4d-34f4-4313-8f9c-2fe9f483959c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1569.318804] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d79298e6-bb55-4ba6-9a68-e460c8953c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1569.319025] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e6442505-b5d0-4736-a24a-41fccda6da6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1569.319088] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5655255a-1d03-4854-b8ad-d77643f9b9c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1569.319203] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1569.319323] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1569.319498] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d40089e3-67b3-452e-a0d1-18d5def1ff34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1569.319549] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1569.319694] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9402929-e845-416b-91e5-39d08ab90a2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1569.319878] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d5f63ddc-e786-471d-a871-2ef878bd2455 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1569.351212] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 176d5151-358a-4b90-9aff-064aa9648618 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1569.362984] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 2c94570a-7bb0-4719-9982-0e7710470db1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1569.374650] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1569.386362] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 401a154d-ff81-4c5d-9860-eae30f7a2171 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1569.397562] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0d095679-87c7-46f6-8869-42b0f22127e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1569.409809] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f7777e07-72df-4af1-8f22-ccb71db0e06a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1569.421733] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d43e416f-bdd1-49e1-aebd-838b319fc047 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1569.421989] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1569.422155] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1569.648478] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2d5591-94a1-4f4c-b120-5072ac10b2ac {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.656552] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684274d5-ebfd-4631-a8e8-a7c0c94bb860 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.686621] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bec767-f662-4cf5-954b-697dce9163a2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.694777] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28210166-f2c3-4ade-8458-ce2ffce19267 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.710649] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1569.719909] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1569.734456] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1569.734676] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.494s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.735134] env[62813]: DEBUG oslo_service.periodic_task [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1570.780306] env[62813]: WARNING oslo_vmware.rw_handles [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1570.780306] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1570.780306] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1570.780306] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1570.780306] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1570.780306] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1570.780306] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1570.780306] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1570.780306] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1570.780306] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1570.780306] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1570.780306] env[62813]: ERROR oslo_vmware.rw_handles [ 1570.781137] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/9a432e56-617d-48e3-8b73-607954f9bf55/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1570.782597] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1570.782839] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Copying Virtual Disk [datastore2] vmware_temp/9a432e56-617d-48e3-8b73-607954f9bf55/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/9a432e56-617d-48e3-8b73-607954f9bf55/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1570.783153] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6846b97d-82e7-4ede-bac9-9af6cf5872e9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.793413] env[62813]: DEBUG oslo_vmware.api [None 
req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Waiting for the task: (returnval){ [ 1570.793413] env[62813]: value = "task-4267718" [ 1570.793413] env[62813]: _type = "Task" [ 1570.793413] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.802044] env[62813]: DEBUG oslo_vmware.api [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Task: {'id': task-4267718, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.304518] env[62813]: DEBUG oslo_vmware.exceptions [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1571.304819] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.305409] env[62813]: ERROR nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1571.305409] env[62813]: Faults: ['InvalidArgument'] [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Traceback (most recent call last): [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] yield resources [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] self.driver.spawn(context, instance, image_meta, [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] self._fetch_image_if_missing(context, vi) [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] image_cache(vi, tmp_image_ds_loc) [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] vm_util.copy_virtual_disk( [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] session._wait_for_task(vmdk_copy_task) [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] return self.wait_for_task(task_ref) [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] return evt.wait() [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] result = hub.switch() [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] return self.greenlet.switch() [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] self.f(*self.args, **self.kw) [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] raise exceptions.translate_fault(task_info.error) [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Faults: ['InvalidArgument'] [ 1571.305409] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] [ 1571.306351] env[62813]: INFO nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: 
eec98a4d-34f4-4313-8f9c-2fe9f483959c] Terminating instance [ 1571.307525] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.307668] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1571.308325] env[62813]: DEBUG nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1571.308520] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1571.308752] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69566632-5f94-454b-8e08-b4557bd0d714 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.311238] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d48a49-2bcb-4615-9d04-20c968bc6ba1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.318897] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1571.319139] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0af034e-7521-4cf9-b2f7-348178aa6308 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.321460] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1571.321633] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1571.322646] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bb3daae-49c0-493d-a630-f87e8726f763 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.327705] env[62813]: DEBUG oslo_vmware.api [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Waiting for the task: (returnval){ [ 1571.327705] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5289a275-ce32-c590-fd06-929a64a74235" [ 1571.327705] env[62813]: _type = "Task" [ 1571.327705] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.336017] env[62813]: DEBUG oslo_vmware.api [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5289a275-ce32-c590-fd06-929a64a74235, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.387615] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1571.387891] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1571.388103] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Deleting the datastore file [datastore2] eec98a4d-34f4-4313-8f9c-2fe9f483959c {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1571.388405] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c43066bc-82b5-4250-ba31-5d9cfd0be7bf {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.396449] env[62813]: DEBUG oslo_vmware.api [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Waiting for the task: (returnval){ [ 1571.396449] env[62813]: value = "task-4267720" [ 1571.396449] env[62813]: _type = "Task" [ 1571.396449] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.404921] env[62813]: DEBUG oslo_vmware.api [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Task: {'id': task-4267720, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.838093] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1571.838482] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Creating directory with path [datastore2] vmware_temp/041967da-f982-477b-a9c0-21f3cf4402c1/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1571.838623] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3a9930f-3966-4de1-977e-51375e721f5e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.851790] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Created directory with path [datastore2] vmware_temp/041967da-f982-477b-a9c0-21f3cf4402c1/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1571.852070] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Fetch image to [datastore2] vmware_temp/041967da-f982-477b-a9c0-21f3cf4402c1/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1571.852279] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/041967da-f982-477b-a9c0-21f3cf4402c1/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1571.852994] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a27331-8f53-4ff7-bdc8-70001a0fb31a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.861098] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3763f4e-e4b8-4928-9987-4dfa8e77ef16 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.871506] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f199151e-012a-4ede-b5d4-2c2f489b9058 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.908127] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7cdf932-b813-4fb8-9f5d-85664b2e3334 {{(pid=62813) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.916762] env[62813]: DEBUG oslo_vmware.api [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Task: {'id': task-4267720, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081558} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.919106] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1571.919106] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1571.919213] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1571.919333] env[62813]: INFO nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1571.921527] env[62813]: DEBUG nova.compute.claims [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1571.921705] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.921919] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.924836] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4c328daa-4c06-4e2e-8660-39e4e75c94d8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.954966] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1572.123496] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.125006] env[62813]: ERROR nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. 
[ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Traceback (most recent call last): [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] result = getattr(controller, method)(*args, **kwargs) [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self._get(image_id) [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] resp, body = self.http_client.get(url, headers=header) [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self.request(url, 'GET', **kwargs) [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self._handle_response(resp) [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise exc.from_response(resp, resp.content) [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] During handling of the above exception, another exception occurred: [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Traceback (most recent call last): [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] yield resources [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self.driver.spawn(context, instance, image_meta, [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._fetch_image_if_missing(context, vi) [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] image_fetch(context, vi, tmp_image_ds_loc) [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] images.fetch_image( [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1572.125006] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] metadata = IMAGE_API.get(context, image_ref) [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return session.show(context, image_id, [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] _reraise_translated_image_exception(image_id) [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise new_exc.with_traceback(exc_trace) [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] result = getattr(controller, method)(*args, **kwargs) [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self._get(image_id) [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] resp, body = self.http_client.get(url, headers=header) [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self.request(url, 'GET', **kwargs) [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self._handle_response(resp) [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise exc.from_response(resp, resp.content) [ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] nova.exception.ImageNotAuthorized: Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. 
[ 1572.126136] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1572.126136] env[62813]: INFO nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Terminating instance [ 1572.126904] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.127127] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1572.127751] env[62813]: DEBUG nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1572.127943] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1572.128192] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81d80fe1-a157-430a-871e-e8554e284f5c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.131190] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967abb9d-7acf-420c-bfe5-8b712e88e9a7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.140937] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1572.141215] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4301167-71b3-495d-b3bd-66b14aff0532 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.143628] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1572.143805] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 
tempest-AttachVolumeShelveTestJSON-56149757-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1572.144976] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-281fc551-643c-447b-87c8-735df22ccc1f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.153827] env[62813]: DEBUG oslo_vmware.api [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Waiting for the task: (returnval){ [ 1572.153827] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52ca9962-c4bf-b9cd-bd16-1de197c8dba9" [ 1572.153827] env[62813]: _type = "Task" [ 1572.153827] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.161977] env[62813]: DEBUG oslo_vmware.api [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52ca9962-c4bf-b9cd-bd16-1de197c8dba9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.214548] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83db472-4fe4-491c-82ab-f06c37d62189 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.222378] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9166d95a-292f-46b5-bb69-4e11895f98c1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.253981] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8dcfdad-0abb-4b93-a44e-7487359d0480 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.258421] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1572.258630] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1572.258803] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Deleting the datastore file [datastore2] d79298e6-bb55-4ba6-9a68-e460c8953c7a {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1572.259112] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82d2edfc-df5e-4dd8-891e-5f16b86a18ad 
{{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.264995] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ae4659-8c2f-4855-869b-84b010ae1609 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.270220] env[62813]: DEBUG oslo_vmware.api [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Waiting for the task: (returnval){ [ 1572.270220] env[62813]: value = "task-4267722" [ 1572.270220] env[62813]: _type = "Task" [ 1572.270220] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.281156] env[62813]: DEBUG nova.compute.provider_tree [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1572.287389] env[62813]: DEBUG oslo_vmware.api [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Task: {'id': task-4267722, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.292848] env[62813]: DEBUG nova.scheduler.client.report [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1572.309496] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.387s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.310106] env[62813]: ERROR nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1572.310106] env[62813]: Faults: ['InvalidArgument'] [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Traceback (most recent call last): [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1572.310106] 
env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] self.driver.spawn(context, instance, image_meta, [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] self._fetch_image_if_missing(context, vi) [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] image_cache(vi, tmp_image_ds_loc) [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] vm_util.copy_virtual_disk( [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] session._wait_for_task(vmdk_copy_task) [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] return self.wait_for_task(task_ref) [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] return evt.wait() [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] result = hub.switch() [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] return self.greenlet.switch() [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] self.f(*self.args, **self.kw) [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] raise exceptions.translate_fault(task_info.error) [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Faults: ['InvalidArgument'] [ 1572.310106] env[62813]: ERROR nova.compute.manager [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] [ 1572.311018] env[62813]: DEBUG nova.compute.utils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1572.312407] env[62813]: DEBUG nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Build of instance eec98a4d-34f4-4313-8f9c-2fe9f483959c was re-scheduled: A specified parameter was not correct: fileType [ 1572.312407] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1572.312816] env[62813]: DEBUG nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1572.312996] env[62813]: DEBUG nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1572.313198] env[62813]: DEBUG nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1572.313379] env[62813]: DEBUG nova.network.neutron [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1572.665365] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1572.665644] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Creating directory with path [datastore2] vmware_temp/6b46dea1-8125-4afc-bcb6-0ddd4f09ec52/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1572.666028] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54b72293-aa6a-457c-9671-292dd7e5822f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.678229] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Created directory with path [datastore2] vmware_temp/6b46dea1-8125-4afc-bcb6-0ddd4f09ec52/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1572.678439] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Fetch image to [datastore2] vmware_temp/6b46dea1-8125-4afc-bcb6-0ddd4f09ec52/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1572.678613] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/6b46dea1-8125-4afc-bcb6-0ddd4f09ec52/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1572.679402] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2d8241-4349-44ca-b251-84c697ff0469 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.686954] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15031d5-02b8-4a07-b1a0-b9d67b605ff3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.697087] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf1342b-bddf-429a-bd68-f26fe7f68fd8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.733406] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb3f693-b7a0-4ad6-ba64-3585bf99dcf4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.741299] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6b892635-839a-4ca6-a507-10a276691da7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.746353] env[62813]: DEBUG nova.network.neutron [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1572.759605] env[62813]: INFO nova.compute.manager [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Took 0.45 seconds to deallocate network for instance. [ 1572.766655] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1572.779904] env[62813]: DEBUG oslo_vmware.api [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Task: {'id': task-4267722, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082266} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.780599] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1572.780599] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1572.780599] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1572.780810] env[62813]: INFO nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1572.782872] env[62813]: DEBUG nova.compute.claims [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1572.783069] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.784031] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.836346] env[62813]: DEBUG oslo_vmware.rw_handles [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6b46dea1-8125-4afc-bcb6-0ddd4f09ec52/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1572.899046] env[62813]: DEBUG oslo_vmware.rw_handles [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Completed reading data from the image iterator. 
{{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1572.899299] env[62813]: DEBUG oslo_vmware.rw_handles [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6b46dea1-8125-4afc-bcb6-0ddd4f09ec52/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1572.919827] env[62813]: INFO nova.scheduler.client.report [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Deleted allocations for instance eec98a4d-34f4-4313-8f9c-2fe9f483959c [ 1572.946168] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4bfd558b-1288-43ba-91e9-9e7250be5252 tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 682.658s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.947544] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6519987c-9a1e-4607-b282-7c4753f4680c tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 485.591s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.947790] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6519987c-9a1e-4607-b282-7c4753f4680c tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Acquiring lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.948075] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6519987c-9a1e-4607-b282-7c4753f4680c tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.948296] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6519987c-9a1e-4607-b282-7c4753f4680c tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.950527] env[62813]: INFO nova.compute.manager [None req-6519987c-9a1e-4607-b282-7c4753f4680c tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Terminating instance [ 1572.955032] env[62813]: DEBUG 
nova.compute.manager [None req-6519987c-9a1e-4607-b282-7c4753f4680c tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1572.955171] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6519987c-9a1e-4607-b282-7c4753f4680c tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1572.955467] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8895c029-7ecb-4bbe-8a46-56ebfd472539 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.965781] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c532ac40-06fb-424b-aec0-25ea12d7fe11 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.976389] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: a296754a-5842-4ab5-9dd9-ccda09caa7d4] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1573.004070] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-6519987c-9a1e-4607-b282-7c4753f4680c tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eec98a4d-34f4-4313-8f9c-2fe9f483959c could not be found. [ 1573.004334] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6519987c-9a1e-4607-b282-7c4753f4680c tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1573.004560] env[62813]: INFO nova.compute.manager [None req-6519987c-9a1e-4607-b282-7c4753f4680c tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1573.004855] env[62813]: DEBUG oslo.service.loopingcall [None req-6519987c-9a1e-4607-b282-7c4753f4680c tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1573.009036] env[62813]: DEBUG nova.compute.manager [-] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1573.009036] env[62813]: DEBUG nova.network.neutron [-] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1573.010866] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: a296754a-5842-4ab5-9dd9-ccda09caa7d4] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1573.036154] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "a296754a-5842-4ab5-9dd9-ccda09caa7d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.188s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.038067] env[62813]: DEBUG nova.network.neutron [-] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.047270] env[62813]: INFO nova.compute.manager [-] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] Took 0.04 seconds to deallocate network for instance. [ 1573.052824] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: 5a2ecdf4-b577-41a5-bb04-a0e4efac48f2] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1573.085106] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: 5a2ecdf4-b577-41a5-bb04-a0e4efac48f2] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1573.112984] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "5a2ecdf4-b577-41a5-bb04-a0e4efac48f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.230s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.125086] env[62813]: DEBUG nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1573.146704] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91846654-298f-4f6a-9ed3-ba99b91f7164 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.156314] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf229ae-0644-4b96-b83e-4af84bc2cec4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.190223] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6519987c-9a1e-4607-b282-7c4753f4680c tempest-ServerMetadataTestJSON-569656461 tempest-ServerMetadataTestJSON-569656461-project-member] Lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.243s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.192489] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 424.495s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.192706] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: eec98a4d-34f4-4313-8f9c-2fe9f483959c] During sync_power_state the instance has a pending task (deleting). Skip. [ 1573.192888] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "eec98a4d-34f4-4313-8f9c-2fe9f483959c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.197568] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350b1e16-f016-487a-b8a7-692e1d285c8c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.207234] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271cf0e8-4c92-4825-bd43-94c0b659453e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.212105] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.222711] env[62813]: DEBUG nova.compute.provider_tree [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1573.231907] env[62813]: DEBUG nova.scheduler.client.report [None 
req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1573.248091] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.465s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.248826] env[62813]: ERROR nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Traceback (most recent call last): [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] result = getattr(controller, method)(*args, **kwargs) [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self._get(image_id) [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] resp, body = self.http_client.get(url, headers=header) [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1573.248826] 
env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self.request(url, 'GET', **kwargs) [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self._handle_response(resp) [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise exc.from_response(resp, resp.content) [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] During handling of the above exception, another exception occurred: [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Traceback (most recent call last): [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self.driver.spawn(context, instance, image_meta, [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._fetch_image_if_missing(context, vi) [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] image_fetch(context, vi, tmp_image_ds_loc) [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] images.fetch_image( [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File 
"/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] metadata = IMAGE_API.get(context, image_ref) [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1573.248826] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return session.show(context, image_id, [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] _reraise_translated_image_exception(image_id) [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise new_exc.with_traceback(exc_trace) [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] result = getattr(controller, method)(*args, **kwargs) [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self._get(image_id) [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] resp, body = self.http_client.get(url, headers=header) [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self.request(url, 'GET', **kwargs) [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: 
d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self._handle_response(resp) [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise exc.from_response(resp, resp.content) [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] nova.exception.ImageNotAuthorized: Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. [ 1573.250320] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.250320] env[62813]: DEBUG nova.compute.utils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1573.251060] env[62813]: DEBUG nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Build of instance d79298e6-bb55-4ba6-9a68-e460c8953c7a was re-scheduled: Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1573.251472] env[62813]: DEBUG nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1573.251649] env[62813]: DEBUG nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1573.251808] env[62813]: DEBUG nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1573.251972] env[62813]: DEBUG nova.network.neutron [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1573.253705] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.042s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.255291] env[62813]: INFO nova.compute.claims [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1573.405731] env[62813]: DEBUG neutronclient.v2_0.client [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62813) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1573.407950] env[62813]: ERROR nova.compute.manager [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Traceback (most recent call last): [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] result = getattr(controller, method)(*args, **kwargs) [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self._get(image_id) [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] resp, body = self.http_client.get(url, headers=header) [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self.request(url, 'GET', **kwargs) [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self._handle_response(resp) [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise exc.from_response(resp, resp.content) [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] During handling of the above exception, another exception occurred: [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Traceback (most recent call last): [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self.driver.spawn(context, instance, image_meta, [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._fetch_image_if_missing(context, vi) [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] image_fetch(context, vi, tmp_image_ds_loc) [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] images.fetch_image( [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] metadata = IMAGE_API.get(context, image_ref) [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1573.407950] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return session.show(context, image_id, [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] _reraise_translated_image_exception(image_id) [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise new_exc.with_traceback(exc_trace) [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: 
d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] result = getattr(controller, method)(*args, **kwargs) [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self._get(image_id) [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] resp, body = self.http_client.get(url, headers=header) [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self.request(url, 'GET', **kwargs) [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self._handle_response(resp) [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise exc.from_response(resp, resp.content) [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] nova.exception.ImageNotAuthorized: Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. 
[ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] During handling of the above exception, another exception occurred: [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Traceback (most recent call last): [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._build_and_run_instance(context, instance, image, [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise exception.RescheduledException( [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] nova.exception.RescheduledException: Build of instance d79298e6-bb55-4ba6-9a68-e460c8953c7a was re-scheduled: Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] During handling of the above exception, another exception occurred: [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Traceback (most recent call last): [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] ret = obj(*args, **kwargs) [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] exception_handler_v20(status_code, error_body) [ 1573.409164] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise client_exc(message=error_message, [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Neutron server returns request_ids: ['req-6ff8eb51-1420-4619-ba2f-67d13c1018ac'] [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: 
d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] During handling of the above exception, another exception occurred: [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Traceback (most recent call last): [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._deallocate_network(context, instance, requested_networks) [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self.network_api.deallocate_for_instance( [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] data = neutron.list_ports(**search_opts) [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] ret = obj(*args, **kwargs) [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self.list('ports', self.ports_path, retrieve_all, [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] ret = obj(*args, **kwargs) [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] for r in self._pagination(collection, path, **params): [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] res = self.get(path, params=params) [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] ret = obj(*args, **kwargs) [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: 
d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self.retry_request("GET", action, body=body, [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] ret = obj(*args, **kwargs) [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self.do_request(method, action, body=body, [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] ret = obj(*args, **kwargs) [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._handle_fault_response(status_code, replybody, resp) [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise exception.Unauthorized() [ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] nova.exception.Unauthorized: Not authorized. 
[ 1573.410281] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.475547] env[62813]: INFO nova.scheduler.client.report [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Deleted allocations for instance d79298e6-bb55-4ba6-9a68-e460c8953c7a [ 1573.495750] env[62813]: DEBUG oslo_concurrency.lockutils [None req-44059833-0048-4f52-8f6b-6748d1fb701e tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 634.469s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.496890] env[62813]: DEBUG oslo_concurrency.lockutils [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 438.811s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.497207] env[62813]: DEBUG oslo_concurrency.lockutils [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Acquiring lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.497492] env[62813]: DEBUG oslo_concurrency.lockutils [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.497954] env[62813]: DEBUG oslo_concurrency.lockutils [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.503686] env[62813]: INFO nova.compute.manager [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Terminating instance [ 1573.509251] env[62813]: DEBUG nova.compute.manager [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Start destroying the instance on the hypervisor. 
{{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1573.509900] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1573.510708] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fe4f78a-fe13-47d9-814e-c6051233f733 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.518719] env[62813]: DEBUG nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1573.529630] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dec9f3f-ff2a-4663-9bc8-69a78e46fa91 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.554374] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943f7007-e907-4110-acaa-5bfdcc52c981 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.567181] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d79298e6-bb55-4ba6-9a68-e460c8953c7a could not be found. [ 1573.567422] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1573.567643] env[62813]: INFO nova.compute.manager [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1573.567935] env[62813]: DEBUG oslo.service.loopingcall [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1573.570819] env[62813]: DEBUG nova.compute.manager [-] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1573.570941] env[62813]: DEBUG nova.network.neutron [-] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1573.576184] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2bfbd4-692a-48db-9c68-3eba23508991 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.613713] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.617875] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f94038-965c-4b33-bc32-d6b504c6f97f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.627950] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3bfe2d-de66-475e-8ca3-69b146e1a9a0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.644618] env[62813]: DEBUG nova.compute.provider_tree [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1573.656259] env[62813]: DEBUG nova.scheduler.client.report [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1573.671622] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.418s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.672144] env[62813]: DEBUG nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] 
[instance: 176d5151-358a-4b90-9aff-064aa9648618] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1573.674710] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.061s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.677307] env[62813]: INFO nova.compute.claims [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1573.718171] env[62813]: DEBUG nova.compute.utils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1573.719744] env[62813]: DEBUG nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1573.719925] env[62813]: DEBUG nova.network.neutron [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1573.724526] env[62813]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62813) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1573.724787] env[62813]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1573.725498] env[62813]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-96fceb5c-b6c9-4102-b45d-1dc1d59206e1'] [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1573.725498] env[62813]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1573.725498] env[62813]: ERROR oslo.service.loopingcall [ 1573.726818] env[62813]: ERROR nova.compute.manager [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1573.732686] env[62813]: DEBUG nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Start building block device mappings for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1573.766741] env[62813]: ERROR nova.compute.manager [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Traceback (most recent call last): [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] ret = obj(*args, **kwargs) [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] exception_handler_v20(status_code, error_body) [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise client_exc(message=error_message, [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Neutron server returns request_ids: ['req-96fceb5c-b6c9-4102-b45d-1dc1d59206e1'] [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] During handling of the above exception, another exception occurred: [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Traceback (most recent call last): [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._delete_instance(context, instance, bdms) [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._shutdown_instance(context, instance, bdms) [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: 
d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._try_deallocate_network(context, instance, requested_networks) [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] with excutils.save_and_reraise_exception(): [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self.force_reraise() [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise self.value [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] _deallocate_network_with_retries() [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return evt.wait() [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] result = hub.switch() [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self.greenlet.switch() [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] result = func(*self.args, **self.kw) [ 1573.766741] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] result = f(*args, **kwargs) [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._deallocate_network( [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File 
"/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self.network_api.deallocate_for_instance( [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] data = neutron.list_ports(**search_opts) [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] ret = obj(*args, **kwargs) [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self.list('ports', self.ports_path, retrieve_all, [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] ret = obj(*args, **kwargs) [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] for r in self._pagination(collection, path, **params): [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] res = self.get(path, params=params) [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] ret = obj(*args, **kwargs) [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return self.retry_request("GET", action, body=body, [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] ret = obj(*args, **kwargs) [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] return 
self.do_request(method, action, body=body, [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] ret = obj(*args, **kwargs) [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] self._handle_fault_response(status_code, replybody, resp) [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1573.767923] env[62813]: ERROR nova.compute.manager [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] [ 1573.784016] env[62813]: DEBUG nova.policy [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65799c813c81459286b669666c703d8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0921c465ef944f1a50af55040cf7621', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1573.797225] env[62813]: DEBUG oslo_concurrency.lockutils [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.300s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.798613] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 425.101s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.798868] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] During sync_power_state the instance has a pending task (deleting). Skip. 
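
The traceback above ends with nova's neutron client wrapper turning a neutronclient Unauthorized (HTTP 401) into NeutronAdminCredentialConfigurationInvalid, which is then re-raised through excutils.save_and_reraise_exception() inside _try_deallocate_network. The following is a minimal, self-contained sketch of that translation pattern only; all names are stand-ins defined locally (it does not import nova, neutronclient, or oslo), and the context manager merely approximates what oslo_utils.excutils provides.

import contextlib
import functools
import sys


class Unauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""


class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for the nova.exception class seen in the traceback."""


@contextlib.contextmanager
def save_and_reraise_exception():
    # Rough approximation of oslo_utils.excutils.save_and_reraise_exception:
    # the body (cleanup/logging) runs, then the original exception propagates.
    exc_info = sys.exc_info()
    try:
        yield
    finally:
        if exc_info[1] is not None:
            raise exc_info[1]


def translate_neutron_errors(func):
    # Mirrors the wrapper visible in the traceback: a 401 from the networking
    # client surfaces to the compute manager as a credential-configuration error.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Unauthorized:
            raise NeutronAdminCredentialConfigurationInvalid(
                "Networking client is experiencing an unauthorized exception.")
    return wrapper


@translate_neutron_errors
def list_ports(**search_opts):
    # Simulate the GET on /v2.0/ports failing with a 401 from keystone/neutron.
    raise Unauthorized("The request you have made requires authentication.")


def try_deallocate_network():
    try:
        list_ports(device_id="d79298e6-bb55-4ba6-9a68-e460c8953c7a")
    except Exception:
        with save_and_reraise_exception():
            # Cleanup/logging would go here; the exception is re-raised after.
            print("Failed to deallocate network; re-raising")


if __name__ == "__main__":
    try:
        try_deallocate_network()
    except NeutronAdminCredentialConfigurationInvalid as exc:
        print("terminate_instance fails with:", exc)

Running the sketch reproduces the shape of the failure recorded above: the 401 is caught once, rewrapped, logged during cleanup, and still reaches the caller, which is why the RPC server ultimately reports the same exception.
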
[ 1573.799032] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "d79298e6-bb55-4ba6-9a68-e460c8953c7a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.809133] env[62813]: DEBUG nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1573.842254] env[62813]: DEBUG nova.virt.hardware [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1573.842921] env[62813]: DEBUG nova.virt.hardware [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1573.843604] env[62813]: DEBUG nova.virt.hardware [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1573.843604] env[62813]: DEBUG nova.virt.hardware [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1573.844342] env[62813]: DEBUG nova.virt.hardware [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1573.844669] env[62813]: DEBUG nova.virt.hardware [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1573.844888] env[62813]: DEBUG nova.virt.hardware [None 
req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1573.845362] env[62813]: DEBUG nova.virt.hardware [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1573.845362] env[62813]: DEBUG nova.virt.hardware [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1573.845543] env[62813]: DEBUG nova.virt.hardware [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1573.845744] env[62813]: DEBUG nova.virt.hardware [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1573.847077] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4afc0b3-7316-48af-bcf2-50f332146ce2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.861842] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ecc1da-983e-4ab7-a626-a3997527c50b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.884332] env[62813]: INFO nova.compute.manager [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] [instance: d79298e6-bb55-4ba6-9a68-e460c8953c7a] Successfully reverted task state from None on failure for instance. [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server [None req-43224f0f-1389-4e63-bfd1-fa748202daf2 tempest-ServersTestMultiNic-1297526951 tempest-ServersTestMultiNic-1297526951-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-96fceb5c-b6c9-4102-b45d-1dc1d59206e1'] [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1573.889917] env[62813]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.891434] env[62813]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1573.891434] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1573.893208] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1573.893208] env[62813]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1573.893208] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1573.893208] env[62813]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1573.893208] env[62813]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1573.893208] env[62813]: ERROR oslo_messaging.rpc.server [ 1573.974719] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bda8b12-5e25-41f9-b257-77d3f6c8992e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.982968] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8c93c3-0270-485a-baf1-5572ddd051d6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.017024] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29616157-ad26-4959-b31a-5f082279618b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.023610] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71257814-4d96-4f79-9b29-47d0928392ed {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.037389] env[62813]: DEBUG nova.compute.provider_tree [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1574.046200] env[62813]: DEBUG nova.scheduler.client.report [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1574.060501] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.386s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.061198] env[62813]: DEBUG nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Start building networks asynchronously for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1574.104024] env[62813]: DEBUG nova.compute.utils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1574.106138] env[62813]: DEBUG nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1574.106254] env[62813]: DEBUG nova.network.neutron [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1574.121517] env[62813]: DEBUG nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1574.206158] env[62813]: DEBUG nova.network.neutron [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Successfully created port: dcff24fc-0eed-49bc-8fcc-25f27ce4822d {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1574.214965] env[62813]: DEBUG nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1574.224620] env[62813]: DEBUG nova.policy [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05e5639b4c294098ac97eae52872b91c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dab4ddba893f4b47886bb54e9083c414', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1574.244776] env[62813]: DEBUG nova.virt.hardware [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1574.245026] env[62813]: DEBUG nova.virt.hardware [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1574.245200] env[62813]: DEBUG nova.virt.hardware [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1574.245393] env[62813]: DEBUG nova.virt.hardware [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1574.245546] env[62813]: DEBUG nova.virt.hardware [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1574.246051] env[62813]: DEBUG nova.virt.hardware [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1574.246322] env[62813]: DEBUG nova.virt.hardware [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 
tempest-ServersTestJSON-661015703-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1574.246523] env[62813]: DEBUG nova.virt.hardware [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1574.246720] env[62813]: DEBUG nova.virt.hardware [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1574.246897] env[62813]: DEBUG nova.virt.hardware [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1574.247092] env[62813]: DEBUG nova.virt.hardware [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1574.248188] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1f4f1c-7856-4c8c-ac80-4cb76115a55e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.258487] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d320647-7ba7-4faa-8b6c-90d30444a4ca {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.634843] env[62813]: DEBUG nova.network.neutron [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Successfully created port: a6bdf346-3a5d-419d-9633-79ed8d843296 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1575.169794] env[62813]: DEBUG nova.compute.manager [req-473696fd-61af-45c6-82d1-989a1d528397 req-851a0fb3-972d-4362-8eab-d4a070176aae service nova] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Received event network-vif-plugged-dcff24fc-0eed-49bc-8fcc-25f27ce4822d {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1575.170089] env[62813]: DEBUG oslo_concurrency.lockutils [req-473696fd-61af-45c6-82d1-989a1d528397 req-851a0fb3-972d-4362-8eab-d4a070176aae service nova] Acquiring lock "176d5151-358a-4b90-9aff-064aa9648618-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.170313] env[62813]: DEBUG oslo_concurrency.lockutils [req-473696fd-61af-45c6-82d1-989a1d528397 req-851a0fb3-972d-4362-8eab-d4a070176aae service nova] Lock "176d5151-358a-4b90-9aff-064aa9648618-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.170406] env[62813]: DEBUG oslo_concurrency.lockutils [req-473696fd-61af-45c6-82d1-989a1d528397 req-851a0fb3-972d-4362-8eab-d4a070176aae service nova] Lock "176d5151-358a-4b90-9aff-064aa9648618-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.170573] env[62813]: DEBUG nova.compute.manager [req-473696fd-61af-45c6-82d1-989a1d528397 req-851a0fb3-972d-4362-8eab-d4a070176aae service nova] [instance: 176d5151-358a-4b90-9aff-064aa9648618] No waiting events found dispatching network-vif-plugged-dcff24fc-0eed-49bc-8fcc-25f27ce4822d {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1575.170736] env[62813]: WARNING nova.compute.manager [req-473696fd-61af-45c6-82d1-989a1d528397 req-851a0fb3-972d-4362-8eab-d4a070176aae service nova] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Received unexpected event network-vif-plugged-dcff24fc-0eed-49bc-8fcc-25f27ce4822d for instance with vm_state building and task_state spawning. [ 1575.256104] env[62813]: DEBUG nova.network.neutron [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Successfully updated port: dcff24fc-0eed-49bc-8fcc-25f27ce4822d {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1575.270668] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "refresh_cache-176d5151-358a-4b90-9aff-064aa9648618" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.270957] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquired lock "refresh_cache-176d5151-358a-4b90-9aff-064aa9648618" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.271068] env[62813]: DEBUG nova.network.neutron [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1575.317783] env[62813]: DEBUG nova.network.neutron [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1575.377826] env[62813]: DEBUG nova.compute.manager [req-1df4cf8e-44f1-4d38-8715-4a0ef868b0c1 req-1701ddb6-be76-49d0-9158-054a0fbf2c00 service nova] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Received event network-vif-plugged-a6bdf346-3a5d-419d-9633-79ed8d843296 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1575.378073] env[62813]: DEBUG oslo_concurrency.lockutils [req-1df4cf8e-44f1-4d38-8715-4a0ef868b0c1 req-1701ddb6-be76-49d0-9158-054a0fbf2c00 service nova] Acquiring lock "2c94570a-7bb0-4719-9982-0e7710470db1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.378291] env[62813]: DEBUG oslo_concurrency.lockutils [req-1df4cf8e-44f1-4d38-8715-4a0ef868b0c1 req-1701ddb6-be76-49d0-9158-054a0fbf2c00 service nova] Lock "2c94570a-7bb0-4719-9982-0e7710470db1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.378461] env[62813]: DEBUG oslo_concurrency.lockutils [req-1df4cf8e-44f1-4d38-8715-4a0ef868b0c1 req-1701ddb6-be76-49d0-9158-054a0fbf2c00 service nova] Lock "2c94570a-7bb0-4719-9982-0e7710470db1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.378634] env[62813]: DEBUG nova.compute.manager [req-1df4cf8e-44f1-4d38-8715-4a0ef868b0c1 req-1701ddb6-be76-49d0-9158-054a0fbf2c00 service nova] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] No waiting events found dispatching network-vif-plugged-a6bdf346-3a5d-419d-9633-79ed8d843296 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1575.378796] env[62813]: WARNING nova.compute.manager [req-1df4cf8e-44f1-4d38-8715-4a0ef868b0c1 req-1701ddb6-be76-49d0-9158-054a0fbf2c00 service nova] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Received unexpected event network-vif-plugged-a6bdf346-3a5d-419d-9633-79ed8d843296 for instance with vm_state building and task_state spawning. 
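
The network-vif-plugged entries above show the external-event path: neutron notifies nova, the compute manager briefly takes the per-instance "-events" lock, pops any waiter registered for that event, and logs "Received unexpected event ..." when nothing is waiting yet because the instance is still building. A rough, self-contained sketch of that pop-or-warn pattern follows; the class and method names are hypothetical and do not reproduce nova's actual InstanceEvents implementation.

import threading


class InstanceEventRegistry:
    """Tracks per-instance events that a spawning thread may wait on."""

    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # (instance_id, event_name) -> threading.Event

    def prepare_for_event(self, instance_id, event_name):
        # Called by the code path that expects the event (e.g. before plugging a VIF).
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_id, event_name)] = waiter
        return waiter

    def pop_instance_event(self, instance_id, event_name):
        # Called from the external-event handler when neutron reports the event.
        with self._lock:
            return self._waiters.pop((instance_id, event_name), None)


def handle_external_event(registry, instance_id, event_name):
    waiter = registry.pop_instance_event(instance_id, event_name)
    if waiter is None:
        # Corresponds to the WARNING in the log: the event arrived before
        # anyone registered interest in it.
        print("Received unexpected event %s for instance %s" % (event_name, instance_id))
    else:
        waiter.set()  # wake up the thread waiting for this event


if __name__ == "__main__":
    registry = InstanceEventRegistry()
    handle_external_event(registry, "176d5151", "network-vif-plugged")   # unexpected

    waiter = registry.prepare_for_event("176d5151", "network-vif-plugged")
    handle_external_event(registry, "176d5151", "network-vif-plugged")   # dispatched
    print("dispatched:", waiter.is_set())

In this log the warning branch is harmless: the port is reported plugged while the instance is still in task_state spawning, and the later network-changed event refreshes the instance network info cache as shown below.
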
[ 1575.475958] env[62813]: DEBUG nova.network.neutron [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Successfully updated port: a6bdf346-3a5d-419d-9633-79ed8d843296 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1575.487893] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "refresh_cache-2c94570a-7bb0-4719-9982-0e7710470db1" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.488172] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired lock "refresh_cache-2c94570a-7bb0-4719-9982-0e7710470db1" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.488399] env[62813]: DEBUG nova.network.neutron [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1575.560632] env[62813]: DEBUG nova.network.neutron [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1575.598018] env[62813]: DEBUG nova.network.neutron [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Updating instance_info_cache with network_info: [{"id": "dcff24fc-0eed-49bc-8fcc-25f27ce4822d", "address": "fa:16:3e:3f:47:72", "network": {"id": "8b829171-2963-4efb-b74a-73be4c3522fe", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1850443047-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0921c465ef944f1a50af55040cf7621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcff24fc-0e", "ovs_interfaceid": "dcff24fc-0eed-49bc-8fcc-25f27ce4822d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1575.614274] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 
tempest-AttachVolumeShelveTestJSON-56149757-project-member] Releasing lock "refresh_cache-176d5151-358a-4b90-9aff-064aa9648618" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1575.614948] env[62813]: DEBUG nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Instance network_info: |[{"id": "dcff24fc-0eed-49bc-8fcc-25f27ce4822d", "address": "fa:16:3e:3f:47:72", "network": {"id": "8b829171-2963-4efb-b74a-73be4c3522fe", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1850443047-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0921c465ef944f1a50af55040cf7621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcff24fc-0e", "ovs_interfaceid": "dcff24fc-0eed-49bc-8fcc-25f27ce4822d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1575.615729] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:47:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dcff24fc-0eed-49bc-8fcc-25f27ce4822d', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1575.624232] env[62813]: DEBUG oslo.service.loopingcall [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1575.625095] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1575.625396] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c04efb1d-5a4f-4d18-b508-bc5de550542e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.652182] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1575.652182] env[62813]: value = "task-4267723" [ 1575.652182] env[62813]: _type = "Task" [ 1575.652182] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.661914] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267723, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.807822] env[62813]: DEBUG nova.network.neutron [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Updating instance_info_cache with network_info: [{"id": "a6bdf346-3a5d-419d-9633-79ed8d843296", "address": "fa:16:3e:0f:0d:5c", "network": {"id": "0d8c8a43-7fa2-405e-9c3d-8c25c7410832", "bridge": "br-int", "label": "tempest-ServersTestJSON-1384810997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab4ddba893f4b47886bb54e9083c414", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6bdf346-3a", "ovs_interfaceid": "a6bdf346-3a5d-419d-9633-79ed8d843296", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1575.825958] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Releasing lock "refresh_cache-2c94570a-7bb0-4719-9982-0e7710470db1" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1575.826309] env[62813]: DEBUG nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Instance network_info: |[{"id": "a6bdf346-3a5d-419d-9633-79ed8d843296", "address": "fa:16:3e:0f:0d:5c", "network": {"id": "0d8c8a43-7fa2-405e-9c3d-8c25c7410832", "bridge": "br-int", "label": "tempest-ServersTestJSON-1384810997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab4ddba893f4b47886bb54e9083c414", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6bdf346-3a", "ovs_interfaceid": "a6bdf346-3a5d-419d-9633-79ed8d843296", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1575.826770] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:0d:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad4fcde7-8926-402a-a9b7-4878d2bc1cf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6bdf346-3a5d-419d-9633-79ed8d843296', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1575.834946] env[62813]: DEBUG oslo.service.loopingcall [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1575.835592] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1575.835843] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3cc8f4ec-ba3f-4741-9b64-1f82fd820e3e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.857107] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1575.857107] env[62813]: value = "task-4267724" [ 1575.857107] env[62813]: _type = "Task" [ 1575.857107] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.867617] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267724, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.162850] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267723, 'name': CreateVM_Task} progress is 99%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.367807] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267724, 'name': CreateVM_Task, 'duration_secs': 0.379979} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.368167] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1576.368799] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.368950] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.369362] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1576.369651] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a749c894-34a4-4df2-b077-f2c27e6789b0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.374695] env[62813]: DEBUG oslo_vmware.api [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for the task: (returnval){ [ 1576.374695] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52cf5648-c3d2-2e21-9424-14d0b311f024" [ 1576.374695] env[62813]: _type = "Task" [ 1576.374695] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.385703] env[62813]: DEBUG oslo_vmware.api [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52cf5648-c3d2-2e21-9424-14d0b311f024, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.663863] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267723, 'name': CreateVM_Task, 'duration_secs': 0.554715} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.664090] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1576.664718] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.886573] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.886878] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1576.887120] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.887339] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.887675] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1576.887935] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ebbed8c-a864-4b14-a7f2-55a54340c37c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.892986] env[62813]: DEBUG oslo_vmware.api [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Waiting for the task: (returnval){ [ 1576.892986] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]529cf462-47cf-ca7b-5fa6-8cf07f6b4d58" [ 1576.892986] env[62813]: _type = "Task" [ 1576.892986] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.902260] env[62813]: DEBUG oslo_vmware.api [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]529cf462-47cf-ca7b-5fa6-8cf07f6b4d58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.206441] env[62813]: DEBUG nova.compute.manager [req-d253b514-239c-4dba-99bb-c8393f5b04a3 req-0ae11908-6ae2-460a-b0e9-cf38ec342c24 service nova] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Received event network-changed-dcff24fc-0eed-49bc-8fcc-25f27ce4822d {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1577.206761] env[62813]: DEBUG nova.compute.manager [req-d253b514-239c-4dba-99bb-c8393f5b04a3 req-0ae11908-6ae2-460a-b0e9-cf38ec342c24 service nova] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Refreshing instance network info cache due to event network-changed-dcff24fc-0eed-49bc-8fcc-25f27ce4822d. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1577.207042] env[62813]: DEBUG oslo_concurrency.lockutils [req-d253b514-239c-4dba-99bb-c8393f5b04a3 req-0ae11908-6ae2-460a-b0e9-cf38ec342c24 service nova] Acquiring lock "refresh_cache-176d5151-358a-4b90-9aff-064aa9648618" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.207227] env[62813]: DEBUG oslo_concurrency.lockutils [req-d253b514-239c-4dba-99bb-c8393f5b04a3 req-0ae11908-6ae2-460a-b0e9-cf38ec342c24 service nova] Acquired lock "refresh_cache-176d5151-358a-4b90-9aff-064aa9648618" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.207430] env[62813]: DEBUG nova.network.neutron [req-d253b514-239c-4dba-99bb-c8393f5b04a3 req-0ae11908-6ae2-460a-b0e9-cf38ec342c24 service nova] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Refreshing network info cache for port dcff24fc-0eed-49bc-8fcc-25f27ce4822d {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1577.404679] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.404967] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1577.405208] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.412979] env[62813]: DEBUG nova.compute.manager [req-e410cc4a-d517-4d4f-bdca-58be5518770a req-79e3b12b-8f92-4b09-9981-70e0651f72fd service nova] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Received event network-changed-a6bdf346-3a5d-419d-9633-79ed8d843296 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1577.413176] env[62813]: DEBUG nova.compute.manager [req-e410cc4a-d517-4d4f-bdca-58be5518770a req-79e3b12b-8f92-4b09-9981-70e0651f72fd service nova] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Refreshing instance network info cache due to event network-changed-a6bdf346-3a5d-419d-9633-79ed8d843296. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1577.413392] env[62813]: DEBUG oslo_concurrency.lockutils [req-e410cc4a-d517-4d4f-bdca-58be5518770a req-79e3b12b-8f92-4b09-9981-70e0651f72fd service nova] Acquiring lock "refresh_cache-2c94570a-7bb0-4719-9982-0e7710470db1" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.413538] env[62813]: DEBUG oslo_concurrency.lockutils [req-e410cc4a-d517-4d4f-bdca-58be5518770a req-79e3b12b-8f92-4b09-9981-70e0651f72fd service nova] Acquired lock "refresh_cache-2c94570a-7bb0-4719-9982-0e7710470db1" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.413701] env[62813]: DEBUG nova.network.neutron [req-e410cc4a-d517-4d4f-bdca-58be5518770a req-79e3b12b-8f92-4b09-9981-70e0651f72fd service nova] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Refreshing network info cache for port a6bdf346-3a5d-419d-9633-79ed8d843296 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1577.511168] env[62813]: DEBUG nova.network.neutron [req-d253b514-239c-4dba-99bb-c8393f5b04a3 req-0ae11908-6ae2-460a-b0e9-cf38ec342c24 service nova] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Updated VIF entry in instance network info cache for port dcff24fc-0eed-49bc-8fcc-25f27ce4822d. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1577.511548] env[62813]: DEBUG nova.network.neutron [req-d253b514-239c-4dba-99bb-c8393f5b04a3 req-0ae11908-6ae2-460a-b0e9-cf38ec342c24 service nova] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Updating instance_info_cache with network_info: [{"id": "dcff24fc-0eed-49bc-8fcc-25f27ce4822d", "address": "fa:16:3e:3f:47:72", "network": {"id": "8b829171-2963-4efb-b74a-73be4c3522fe", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1850443047-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0921c465ef944f1a50af55040cf7621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcff24fc-0e", "ovs_interfaceid": "dcff24fc-0eed-49bc-8fcc-25f27ce4822d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.522111] env[62813]: DEBUG oslo_concurrency.lockutils [req-d253b514-239c-4dba-99bb-c8393f5b04a3 req-0ae11908-6ae2-460a-b0e9-cf38ec342c24 service nova] Releasing lock "refresh_cache-176d5151-358a-4b90-9aff-064aa9648618" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.692342] env[62813]: DEBUG nova.network.neutron [req-e410cc4a-d517-4d4f-bdca-58be5518770a req-79e3b12b-8f92-4b09-9981-70e0651f72fd service nova] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Updated VIF entry in instance network info cache for port a6bdf346-3a5d-419d-9633-79ed8d843296. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1577.692729] env[62813]: DEBUG nova.network.neutron [req-e410cc4a-d517-4d4f-bdca-58be5518770a req-79e3b12b-8f92-4b09-9981-70e0651f72fd service nova] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Updating instance_info_cache with network_info: [{"id": "a6bdf346-3a5d-419d-9633-79ed8d843296", "address": "fa:16:3e:0f:0d:5c", "network": {"id": "0d8c8a43-7fa2-405e-9c3d-8c25c7410832", "bridge": "br-int", "label": "tempest-ServersTestJSON-1384810997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab4ddba893f4b47886bb54e9083c414", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6bdf346-3a", "ovs_interfaceid": "a6bdf346-3a5d-419d-9633-79ed8d843296", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.703374] env[62813]: DEBUG oslo_concurrency.lockutils [req-e410cc4a-d517-4d4f-bdca-58be5518770a req-79e3b12b-8f92-4b09-9981-70e0651f72fd service nova] Releasing lock "refresh_cache-2c94570a-7bb0-4719-9982-0e7710470db1" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.777161] env[62813]: DEBUG oslo_concurrency.lockutils [None req-30c4d3cc-8cd6-4135-931a-9f0253aef1b0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "176d5151-358a-4b90-9aff-064aa9648618" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.853330] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquiring lock "63766a48-0d55-4261-9949-be3335ae8d0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.853750] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Lock "63766a48-0d55-4261-9949-be3335ae8d0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.163577] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1619.163981] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1620.800605] env[62813]: WARNING oslo_vmware.rw_handles [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1620.800605] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1620.800605] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1620.800605] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1620.800605] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1620.800605] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1620.800605] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1620.800605] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1620.800605] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1620.800605] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1620.800605] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1620.800605] env[62813]: ERROR oslo_vmware.rw_handles [ 1620.801454] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/6b46dea1-8125-4afc-bcb6-0ddd4f09ec52/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1620.802944] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1620.803219] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Copying Virtual Disk [datastore2] vmware_temp/6b46dea1-8125-4afc-bcb6-0ddd4f09ec52/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/6b46dea1-8125-4afc-bcb6-0ddd4f09ec52/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1620.803537] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ccccd5f4-c06c-4cb1-9c60-33affbe89a4c {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.812888] env[62813]: DEBUG oslo_vmware.api [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Waiting for the task: (returnval){ [ 1620.812888] env[62813]: value = "task-4267725" [ 1620.812888] env[62813]: _type = "Task" [ 1620.812888] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.821706] env[62813]: DEBUG oslo_vmware.api [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Task: {'id': task-4267725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.164710] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1621.324021] env[62813]: DEBUG oslo_vmware.exceptions [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1621.324399] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.325044] env[62813]: ERROR nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1621.325044] env[62813]: Faults: ['InvalidArgument'] [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Traceback (most recent call last): [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] yield resources [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] self.driver.spawn(context, instance, image_meta, [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] self._vmops.spawn(context, instance, 
image_meta, injected_files, [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] self._fetch_image_if_missing(context, vi) [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] image_cache(vi, tmp_image_ds_loc) [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] vm_util.copy_virtual_disk( [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] session._wait_for_task(vmdk_copy_task) [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] return self.wait_for_task(task_ref) [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] return evt.wait() [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] result = hub.switch() [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] return self.greenlet.switch() [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] self.f(*self.args, **self.kw) [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] raise exceptions.translate_fault(task_info.error) [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1621.325044] 
env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Faults: ['InvalidArgument'] [ 1621.325044] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] [ 1621.325909] env[62813]: INFO nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Terminating instance [ 1621.327199] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.327441] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1621.327720] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07a39763-8664-4d79-ab3e-ffab3813a9f7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.330280] env[62813]: DEBUG nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Start destroying the instance on the hypervisor. 
{{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1621.330632] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1621.331367] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3e59fd-d366-490d-bf91-bb6daf1795d4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.339097] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1621.339342] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-90529d64-3991-48ea-be80-c2280c92ac1e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.341907] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1621.342112] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1621.343140] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8ccd455-a96b-45f8-adc9-92e27fc7986a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.348250] env[62813]: DEBUG oslo_vmware.api [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for the task: (returnval){ [ 1621.348250] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52ec39cb-0188-b240-73d7-e7390c19447b" [ 1621.348250] env[62813]: _type = "Task" [ 1621.348250] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.356035] env[62813]: DEBUG oslo_vmware.api [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52ec39cb-0188-b240-73d7-e7390c19447b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.417118] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1621.417369] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1621.417558] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Deleting the datastore file [datastore2] e6442505-b5d0-4736-a24a-41fccda6da6f {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1621.417836] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2bba0d69-7d99-4550-a773-73e27d441648 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.424609] env[62813]: DEBUG oslo_vmware.api [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Waiting for the task: (returnval){ [ 1621.424609] env[62813]: value = "task-4267727" [ 1621.424609] env[62813]: _type = "Task" [ 1621.424609] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.433098] env[62813]: DEBUG oslo_vmware.api [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Task: {'id': task-4267727, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.859168] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1621.859538] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Creating directory with path [datastore2] vmware_temp/1338e9b1-d69d-4847-a35b-6d7d40e0c512/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1621.859721] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-532c5374-ece3-492f-a18e-989a63d9f1fa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.871257] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Created directory with path [datastore2] vmware_temp/1338e9b1-d69d-4847-a35b-6d7d40e0c512/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1621.871457] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Fetch image to [datastore2] vmware_temp/1338e9b1-d69d-4847-a35b-6d7d40e0c512/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1621.871632] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/1338e9b1-d69d-4847-a35b-6d7d40e0c512/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1621.872384] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8965cd6a-9781-4d23-b7d7-802a60c68bc9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.879205] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f179b3-1cad-4b83-87ca-3e956e8ff855 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.888312] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a8d72f-8d7d-4d26-a498-7133747d249f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.919367] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f7f215-f9fe-4c87-ab6a-6bd6006579d1 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.928762] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-96c99307-a9cf-453d-9cb7-3f63a7d3f0a0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.936515] env[62813]: DEBUG oslo_vmware.api [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Task: {'id': task-4267727, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075997} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.936756] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1621.936968] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1621.937164] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1621.937343] env[62813]: INFO nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Took 0.61 seconds to destroy the instance on the hypervisor. 
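
The traceback a few entries above ends in oslo_vmware.exceptions.VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) raised out of the wait_for_task/_poll_task path while caching the sparse image via CopyVirtualDisk_Task. The snippet below is a minimal Python sketch of that task-polling pattern, not the oslo.vmware implementation itself: get_task_info and translate_fault are hypothetical callables standing in for vSphere API helpers, and POLL_INTERVAL is an illustrative value.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; illustrative value only

    def wait_for_vcenter_task(task_ref, get_task_info, translate_fault,
                              poll_interval=POLL_INTERVAL):
        """Poll a vCenter task until it reaches a terminal state.

        Sketch of the pattern behind the traceback above: read the task
        state, report progress, and on 'error' raise a translated fault
        (here, the InvalidArgument/fileType failure). get_task_info and
        translate_fault are hypothetical stand-ins, not oslo.vmware APIs.
        """
        while True:
            info = get_task_info(task_ref)
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # the log's _poll_task raises a translated fault at this point
                raise translate_fault(info['error'])
            # 'queued' or 'running': report progress and poll again
            print('Task %s progress is %s%%' % (task_ref, info.get('progress', 0)))
            time.sleep(poll_interval)

The same loop shape is what produces the repeated "progress is 0%" / "progress is 99%" entries seen earlier for the CreateVM and CopyVirtualDisk tasks.
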
[ 1621.939482] env[62813]: DEBUG nova.compute.claims [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1621.939655] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.939871] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.953982] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1622.015352] env[62813]: DEBUG oslo_vmware.rw_handles [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1338e9b1-d69d-4847-a35b-6d7d40e0c512/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1622.075348] env[62813]: DEBUG oslo_vmware.rw_handles [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1622.075448] env[62813]: DEBUG oslo_vmware.rw_handles [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1338e9b1-d69d-4847-a35b-6d7d40e0c512/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1622.262936] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a86445-ed98-46c9-a017-9a5ac0ae28a9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.271116] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c444311-fa4d-4563-b918-fa4672633ebe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.301648] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a2b92c-c06c-44bd-8d58-db5b838be06e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.309821] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86a3da0-bfb8-4244-806b-a093ab99ad6c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.324028] env[62813]: DEBUG nova.compute.provider_tree [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1622.335548] env[62813]: DEBUG nova.scheduler.client.report [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1622.356085] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.416s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.356286] env[62813]: ERROR nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1622.356286] env[62813]: Faults: ['InvalidArgument'] [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Traceback (most recent call last): [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1622.356286] 
env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] self.driver.spawn(context, instance, image_meta, [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] self._fetch_image_if_missing(context, vi) [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] image_cache(vi, tmp_image_ds_loc) [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] vm_util.copy_virtual_disk( [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] session._wait_for_task(vmdk_copy_task) [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] return self.wait_for_task(task_ref) [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] return evt.wait() [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] result = hub.switch() [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] return self.greenlet.switch() [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] self.f(*self.args, **self.kw) [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] raise exceptions.translate_fault(task_info.error) [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Faults: ['InvalidArgument'] [ 1622.356286] env[62813]: ERROR nova.compute.manager [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] [ 1622.357185] env[62813]: DEBUG nova.compute.utils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1622.358689] env[62813]: DEBUG nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Build of instance e6442505-b5d0-4736-a24a-41fccda6da6f was re-scheduled: A specified parameter was not correct: fileType [ 1622.358689] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1622.359071] env[62813]: DEBUG nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1622.359275] env[62813]: DEBUG nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1622.359475] env[62813]: DEBUG nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1622.359642] env[62813]: DEBUG nova.network.neutron [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1622.682324] env[62813]: DEBUG nova.network.neutron [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.698248] env[62813]: INFO nova.compute.manager [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Took 0.34 seconds to deallocate network for instance. [ 1622.805938] env[62813]: INFO nova.scheduler.client.report [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Deleted allocations for instance e6442505-b5d0-4736-a24a-41fccda6da6f [ 1622.828973] env[62813]: DEBUG oslo_concurrency.lockutils [None req-829f27c4-fe18-4c34-9e17-a2a5e9fc94e0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "e6442505-b5d0-4736-a24a-41fccda6da6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 629.307s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.830200] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1540f5fe-f660-453b-9056-43fe1cd6d0b6 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "e6442505-b5d0-4736-a24a-41fccda6da6f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 433.499s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.830426] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1540f5fe-f660-453b-9056-43fe1cd6d0b6 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "e6442505-b5d0-4736-a24a-41fccda6da6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.830634] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1540f5fe-f660-453b-9056-43fe1cd6d0b6 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "e6442505-b5d0-4736-a24a-41fccda6da6f-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.830800] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1540f5fe-f660-453b-9056-43fe1cd6d0b6 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "e6442505-b5d0-4736-a24a-41fccda6da6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.833217] env[62813]: INFO nova.compute.manager [None req-1540f5fe-f660-453b-9056-43fe1cd6d0b6 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Terminating instance [ 1622.834908] env[62813]: DEBUG nova.compute.manager [None req-1540f5fe-f660-453b-9056-43fe1cd6d0b6 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1622.835150] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-1540f5fe-f660-453b-9056-43fe1cd6d0b6 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1622.835627] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-41d2534c-0e1d-45cd-b775-e0f660a0b69c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.840855] env[62813]: DEBUG nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1622.847453] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3ab737-bc34-441b-9586-e54bd7ac7c42 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.883421] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-1540f5fe-f660-453b-9056-43fe1cd6d0b6 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e6442505-b5d0-4736-a24a-41fccda6da6f could not be found. 
[ 1622.883741] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-1540f5fe-f660-453b-9056-43fe1cd6d0b6 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1622.883828] env[62813]: INFO nova.compute.manager [None req-1540f5fe-f660-453b-9056-43fe1cd6d0b6 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1622.884097] env[62813]: DEBUG oslo.service.loopingcall [None req-1540f5fe-f660-453b-9056-43fe1cd6d0b6 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1622.884984] env[62813]: DEBUG nova.compute.manager [-] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1622.885099] env[62813]: DEBUG nova.network.neutron [-] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1622.901517] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.901759] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.903263] env[62813]: INFO nova.compute.claims [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1622.918829] env[62813]: DEBUG nova.network.neutron [-] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.940261] env[62813]: INFO nova.compute.manager [-] [instance: e6442505-b5d0-4736-a24a-41fccda6da6f] Took 0.06 seconds to deallocate network for instance. 
[ 1623.031371] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1540f5fe-f660-453b-9056-43fe1cd6d0b6 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "e6442505-b5d0-4736-a24a-41fccda6da6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.201s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.153801] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79791ae8-772e-460f-94f2-a88b12ec61f6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.162431] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12fae73-ce06-4464-9b4e-d6f6236c4a00 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.194197] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90423d07-51aa-4e83-a5c9-5d88a1c8eb21 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.202454] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a8c870-841d-47cc-aa3e-b3d3a076cfbe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.216351] env[62813]: DEBUG nova.compute.provider_tree [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1623.226841] env[62813]: DEBUG nova.scheduler.client.report [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1623.242687] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.341s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.243222] env[62813]: DEBUG nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Start building networks asynchronously for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1623.277752] env[62813]: DEBUG nova.compute.utils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1623.279428] env[62813]: DEBUG nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1623.279522] env[62813]: DEBUG nova.network.neutron [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1623.289783] env[62813]: DEBUG nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1623.347105] env[62813]: DEBUG nova.policy [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a739f23d66a4f3c92a889ee02169cb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '529545a4514b4322945fc7ad30099d9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1623.359055] env[62813]: DEBUG nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1623.386840] env[62813]: DEBUG nova.virt.hardware [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1623.387140] env[62813]: DEBUG nova.virt.hardware [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1623.387309] env[62813]: DEBUG nova.virt.hardware [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1623.387496] env[62813]: DEBUG nova.virt.hardware [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1623.387647] env[62813]: DEBUG nova.virt.hardware [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1623.387800] env[62813]: DEBUG nova.virt.hardware [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1623.388033] env[62813]: DEBUG nova.virt.hardware [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1623.388263] env[62813]: DEBUG nova.virt.hardware [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1623.388408] env[62813]: DEBUG nova.virt.hardware [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1623.388576] env[62813]: DEBUG nova.virt.hardware [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1623.390712] env[62813]: DEBUG nova.virt.hardware [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1623.390712] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab883d3-81b5-4718-8d32-c489162696a3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.398445] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479a623d-4ed2-4052-8975-15d65ec7c2ca {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.762813] env[62813]: DEBUG nova.network.neutron [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Successfully created port: d5ce89ff-d3c3-4f10-93f6-7872187ad2eb {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1624.165331] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1624.377604] env[62813]: DEBUG oslo_concurrency.lockutils [None req-08cfc1b9-d203-4a94-8db6-88b344523a80 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "2c94570a-7bb0-4719-9982-0e7710470db1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.751421] env[62813]: DEBUG nova.compute.manager [req-e180f8f2-47fe-4d1e-8f54-055d3e00f066 req-a132453b-72af-414d-a304-6be86c3f3bd8 service nova] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Received event network-vif-plugged-d5ce89ff-d3c3-4f10-93f6-7872187ad2eb {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1624.751421] env[62813]: DEBUG oslo_concurrency.lockutils [req-e180f8f2-47fe-4d1e-8f54-055d3e00f066 req-a132453b-72af-414d-a304-6be86c3f3bd8 service nova] Acquiring lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.751421] env[62813]: 
DEBUG oslo_concurrency.lockutils [req-e180f8f2-47fe-4d1e-8f54-055d3e00f066 req-a132453b-72af-414d-a304-6be86c3f3bd8 service nova] Lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.751421] env[62813]: DEBUG oslo_concurrency.lockutils [req-e180f8f2-47fe-4d1e-8f54-055d3e00f066 req-a132453b-72af-414d-a304-6be86c3f3bd8 service nova] Lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.751421] env[62813]: DEBUG nova.compute.manager [req-e180f8f2-47fe-4d1e-8f54-055d3e00f066 req-a132453b-72af-414d-a304-6be86c3f3bd8 service nova] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] No waiting events found dispatching network-vif-plugged-d5ce89ff-d3c3-4f10-93f6-7872187ad2eb {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1624.751421] env[62813]: WARNING nova.compute.manager [req-e180f8f2-47fe-4d1e-8f54-055d3e00f066 req-a132453b-72af-414d-a304-6be86c3f3bd8 service nova] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Received unexpected event network-vif-plugged-d5ce89ff-d3c3-4f10-93f6-7872187ad2eb for instance with vm_state building and task_state spawning. [ 1624.753944] env[62813]: DEBUG nova.network.neutron [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Successfully updated port: d5ce89ff-d3c3-4f10-93f6-7872187ad2eb {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1624.775833] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Acquiring lock "refresh_cache-cd5b7232-5d47-43c6-874e-6f9e6b45f420" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.775944] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Acquired lock "refresh_cache-cd5b7232-5d47-43c6-874e-6f9e6b45f420" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.776125] env[62813]: DEBUG nova.network.neutron [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1624.821754] env[62813]: DEBUG nova.network.neutron [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1625.011689] env[62813]: DEBUG nova.network.neutron [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Updating instance_info_cache with network_info: [{"id": "d5ce89ff-d3c3-4f10-93f6-7872187ad2eb", "address": "fa:16:3e:70:24:f3", "network": {"id": "ad3d5c4f-8479-494f-bfd4-98c36a6f9707", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1741739100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "529545a4514b4322945fc7ad30099d9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5ce89ff-d3", "ovs_interfaceid": "d5ce89ff-d3c3-4f10-93f6-7872187ad2eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.024481] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Releasing lock "refresh_cache-cd5b7232-5d47-43c6-874e-6f9e6b45f420" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.024794] env[62813]: DEBUG nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Instance network_info: |[{"id": "d5ce89ff-d3c3-4f10-93f6-7872187ad2eb", "address": "fa:16:3e:70:24:f3", "network": {"id": "ad3d5c4f-8479-494f-bfd4-98c36a6f9707", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1741739100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "529545a4514b4322945fc7ad30099d9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5ce89ff-d3", "ovs_interfaceid": "d5ce89ff-d3c3-4f10-93f6-7872187ad2eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1625.025267] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:24:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56834f67-27a8-43dc-bbc6-a74aaa08959b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd5ce89ff-d3c3-4f10-93f6-7872187ad2eb', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1625.032749] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Creating folder: Project (529545a4514b4322945fc7ad30099d9b). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1625.033332] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8ca5e38-46d0-4323-97ca-666d2359ff08 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.045617] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Created folder: Project (529545a4514b4322945fc7ad30099d9b) in parent group-v840812. [ 1625.045876] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Creating folder: Instances. Parent ref: group-v840904. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1625.046155] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2aecfff8-eacc-4d1c-94f7-f69d6a719918 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.058186] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Created folder: Instances in parent group-v840904. [ 1625.058441] env[62813]: DEBUG oslo.service.loopingcall [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1625.058635] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1625.058848] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a239ab4-48a2-4a77-96d0-fc58431595db {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.078662] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1625.078662] env[62813]: value = "task-4267730" [ 1625.078662] env[62813]: _type = "Task" [ 1625.078662] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.087252] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267730, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.589438] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267730, 'name': CreateVM_Task, 'duration_secs': 0.356442} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.589852] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1625.590342] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.590527] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.590892] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1625.591166] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2896675-0071-45c8-88d1-0d77852f9f32 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.596074] env[62813]: DEBUG oslo_vmware.api [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Waiting for the task: (returnval){ [ 1625.596074] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]520103de-b89d-d2e9-6726-3019124a18f0" [ 1625.596074] env[62813]: _type = "Task" [ 1625.596074] env[62813]: } to 
complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.604273] env[62813]: DEBUG oslo_vmware.api [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]520103de-b89d-d2e9-6726-3019124a18f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.106675] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.106856] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1626.107112] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.163835] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1626.164036] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1626.164170] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1626.188640] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1626.188835] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1626.188934] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1626.189067] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1626.189199] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1626.189323] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1626.189445] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1626.189567] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1626.189689] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1626.189809] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1626.189930] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1626.783277] env[62813]: DEBUG nova.compute.manager [req-5f864bca-f83f-4c54-9b2d-2e2cf25eff11 req-26e04e0f-bc9a-4461-a34b-8a069d03654d service nova] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Received event network-changed-d5ce89ff-d3c3-4f10-93f6-7872187ad2eb {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1626.783572] env[62813]: DEBUG nova.compute.manager [req-5f864bca-f83f-4c54-9b2d-2e2cf25eff11 req-26e04e0f-bc9a-4461-a34b-8a069d03654d service nova] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Refreshing instance network info cache due to event network-changed-d5ce89ff-d3c3-4f10-93f6-7872187ad2eb. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1626.784201] env[62813]: DEBUG oslo_concurrency.lockutils [req-5f864bca-f83f-4c54-9b2d-2e2cf25eff11 req-26e04e0f-bc9a-4461-a34b-8a069d03654d service nova] Acquiring lock "refresh_cache-cd5b7232-5d47-43c6-874e-6f9e6b45f420" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.784201] env[62813]: DEBUG oslo_concurrency.lockutils [req-5f864bca-f83f-4c54-9b2d-2e2cf25eff11 req-26e04e0f-bc9a-4461-a34b-8a069d03654d service nova] Acquired lock "refresh_cache-cd5b7232-5d47-43c6-874e-6f9e6b45f420" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.784201] env[62813]: DEBUG nova.network.neutron [req-5f864bca-f83f-4c54-9b2d-2e2cf25eff11 req-26e04e0f-bc9a-4461-a34b-8a069d03654d service nova] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Refreshing network info cache for port d5ce89ff-d3c3-4f10-93f6-7872187ad2eb {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1627.102819] env[62813]: DEBUG nova.network.neutron [req-5f864bca-f83f-4c54-9b2d-2e2cf25eff11 req-26e04e0f-bc9a-4461-a34b-8a069d03654d service nova] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Updated VIF entry in instance network info cache for port d5ce89ff-d3c3-4f10-93f6-7872187ad2eb. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1627.103230] env[62813]: DEBUG nova.network.neutron [req-5f864bca-f83f-4c54-9b2d-2e2cf25eff11 req-26e04e0f-bc9a-4461-a34b-8a069d03654d service nova] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Updating instance_info_cache with network_info: [{"id": "d5ce89ff-d3c3-4f10-93f6-7872187ad2eb", "address": "fa:16:3e:70:24:f3", "network": {"id": "ad3d5c4f-8479-494f-bfd4-98c36a6f9707", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1741739100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "529545a4514b4322945fc7ad30099d9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5ce89ff-d3", "ovs_interfaceid": "d5ce89ff-d3c3-4f10-93f6-7872187ad2eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.113979] env[62813]: DEBUG oslo_concurrency.lockutils [req-5f864bca-f83f-4c54-9b2d-2e2cf25eff11 req-26e04e0f-bc9a-4461-a34b-8a069d03654d service nova] Releasing lock "refresh_cache-cd5b7232-5d47-43c6-874e-6f9e6b45f420" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.163644] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1627.164051] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1628.048095] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "1feb4a0d-0b0f-434e-91e9-321a48fb166c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.048438] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "1feb4a0d-0b0f-434e-91e9-321a48fb166c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.158933] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.181347] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.181565] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.192401] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.192613] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.192783] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.192937] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) 
update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1629.194453] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08eb3ab1-5dad-484b-8aac-10bd7d64bca4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.203758] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b089107-145d-4d8a-b2ff-f52680ddd333 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.209491] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0726e34d-5825-46f8-8c29-298d7af4247f tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Acquiring lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.221062] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9e4ee6-cbc4-485e-bf4c-316a878e5393 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.228925] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4fef1a-70fc-4808-8ab6-9efc004bfd40 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.258389] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180767MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1629.258549] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.258755] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.331708] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5655255a-1d03-4854-b8ad-d77643f9b9c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1629.331887] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1629.332080] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1629.332314] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d40089e3-67b3-452e-a0d1-18d5def1ff34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1629.332398] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1629.332518] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9402929-e845-416b-91e5-39d08ab90a2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1629.332637] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d5f63ddc-e786-471d-a871-2ef878bd2455 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1629.332756] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 176d5151-358a-4b90-9aff-064aa9648618 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1629.332873] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 2c94570a-7bb0-4719-9982-0e7710470db1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1629.332991] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1629.344792] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 401a154d-ff81-4c5d-9860-eae30f7a2171 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1629.357133] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0d095679-87c7-46f6-8869-42b0f22127e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1629.367917] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f7777e07-72df-4af1-8f22-ccb71db0e06a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1629.377804] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d43e416f-bdd1-49e1-aebd-838b319fc047 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1629.387554] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 63766a48-0d55-4261-9949-be3335ae8d0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1629.398509] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1629.398726] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1629.398905] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1629.583277] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6865eb4-8ba0-42da-991d-b6f786c5d09f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.591628] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e50beb-3bbc-436c-9590-2920fe4d3ba5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.622961] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18ea238-64e2-42b8-994d-805fb0f8505b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.631548] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad843b60-9697-492b-8805-775ac772794f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.645986] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1629.654032] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1629.670016] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1629.670016] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.411s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1632.652631] env[62813]: DEBUG oslo_service.periodic_task [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.343036] env[62813]: WARNING oslo_vmware.rw_handles [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1667.343036] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1667.343036] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1667.343036] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1667.343036] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1667.343036] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1667.343036] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1667.343036] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1667.343036] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1667.343036] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1667.343036] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1667.343036] env[62813]: ERROR oslo_vmware.rw_handles [ 1667.343036] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/1338e9b1-d69d-4847-a35b-6d7d40e0c512/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1667.344747] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1667.345629] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Copying Virtual Disk [datastore2] vmware_temp/1338e9b1-d69d-4847-a35b-6d7d40e0c512/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/1338e9b1-d69d-4847-a35b-6d7d40e0c512/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1667.345629] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-164da8e4-a085-48ab-a60a-616855b53700 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.355122] env[62813]: DEBUG oslo_vmware.api [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 
tempest-ServersTestJSON-661015703-project-member] Waiting for the task: (returnval){ [ 1667.355122] env[62813]: value = "task-4267731" [ 1667.355122] env[62813]: _type = "Task" [ 1667.355122] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.363905] env[62813]: DEBUG oslo_vmware.api [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': task-4267731, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.865891] env[62813]: DEBUG oslo_vmware.exceptions [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1667.866132] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.866722] env[62813]: ERROR nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1667.866722] env[62813]: Faults: ['InvalidArgument'] [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Traceback (most recent call last): [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] yield resources [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] self.driver.spawn(context, instance, image_meta, [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] self._fetch_image_if_missing(context, vi) [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 
5655255a-1d03-4854-b8ad-d77643f9b9c6] image_cache(vi, tmp_image_ds_loc) [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] vm_util.copy_virtual_disk( [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] session._wait_for_task(vmdk_copy_task) [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] return self.wait_for_task(task_ref) [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] return evt.wait() [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] result = hub.switch() [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] return self.greenlet.switch() [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] self.f(*self.args, **self.kw) [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] raise exceptions.translate_fault(task_info.error) [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Faults: ['InvalidArgument'] [ 1667.866722] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] [ 1667.867877] env[62813]: INFO nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Terminating instance [ 1667.868662] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 
tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.868872] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1667.869135] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-839a7a28-b2e7-4746-8afe-74d5139a7fb0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.871369] env[62813]: DEBUG nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1667.871566] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1667.872298] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e539f068-0096-4a20-99d7-af134b5bb126 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.879228] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1667.879455] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f1cb272-2eb3-4d28-a24d-6f820df3565d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.881737] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1667.881910] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1667.882907] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27b11e27-d879-4e79-8180-029ecb113cb4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.887744] env[62813]: DEBUG oslo_vmware.api [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Waiting for the task: (returnval){ [ 1667.887744] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52ce8c30-6e87-87c0-a2f5-90412a37e54d" [ 1667.887744] env[62813]: _type = "Task" [ 1667.887744] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.895311] env[62813]: DEBUG oslo_vmware.api [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52ce8c30-6e87-87c0-a2f5-90412a37e54d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.951117] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1667.951408] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1667.951644] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Deleting the datastore file [datastore2] 5655255a-1d03-4854-b8ad-d77643f9b9c6 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1667.951978] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cef2325e-db5c-4dce-b4b1-4db4c8a6a91c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.958577] env[62813]: DEBUG oslo_vmware.api [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for the task: (returnval){ [ 1667.958577] env[62813]: value = "task-4267733" [ 1667.958577] env[62813]: _type = "Task" [ 1667.958577] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.966504] env[62813]: DEBUG oslo_vmware.api [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': task-4267733, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.400789] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1668.401135] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Creating directory with path [datastore2] vmware_temp/e4968733-3214-43c3-a8fd-51743fa72615/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1668.401397] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-126d556f-2577-4f04-88de-d3aca539b638 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.417023] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Created directory with path [datastore2] vmware_temp/e4968733-3214-43c3-a8fd-51743fa72615/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1668.417023] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Fetch image to [datastore2] vmware_temp/e4968733-3214-43c3-a8fd-51743fa72615/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1668.417023] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/e4968733-3214-43c3-a8fd-51743fa72615/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1668.417023] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9b35ae-297b-4150-a901-214413873792 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.423264] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e666c367-5b57-4fec-bc2c-5607e1ec5fde {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.433535] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7779d2f-02ae-47e5-9a02-df95f1d2ec11 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.474982] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-68f1ca01-01ee-486b-bdf3-688753ade853 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.483747] env[62813]: DEBUG oslo_vmware.api [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': task-4267733, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083387} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.483966] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5ad2ed0a-3590-47a8-8b15-5c29248bc685 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.485675] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1668.485865] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1668.486056] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1668.486240] env[62813]: INFO nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Took 0.61 seconds to destroy the instance on the hypervisor. 
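
The destroy/cleanup sequence above (UnregisterVM, FileManager.DeleteDatastoreFile_Task, then polling task-4267733 until it completes) follows the usual oslo.vmware request/poll pattern: an asynchronous vSphere task is started through invoke_api() and the caller blocks on wait_for_task(), which is what emits the "Waiting for the task ... to complete" and "progress is 0%" DEBUG lines. A minimal sketch of that pattern is below; the vCenter endpoint, credentials and the datastore path are placeholders for illustration, not values taken from this deployment.

    # Sketch of the invoke_api()/wait_for_task() pattern visible in the log.
    # Endpoint, credentials and the file path below are assumptions.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Pick a datacenter so the file manager can resolve the "[datastore] ..." path.
    results = session.invoke_api(vim_util, 'get_objects', session.vim,
                                 'Datacenter', 100)
    dc_ref = results.objects[0].obj

    # Start the asynchronous vSphere task and block until it finishes;
    # wait_for_task() polls the task and logs its progress, as seen above.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore2] 5655255a-1d03-4854-b8ad-d77643f9b9c6',
        datacenter=dc_ref)
    session.wait_for_task(task)

The same pattern backs CopyVirtualDisk_Task and SearchDatastore_Task in this trace: the SOAP call only schedules work on vCenter, so every caller ends up in the same wait_for_task() polling loop.
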
[ 1668.488367] env[62813]: DEBUG nova.compute.claims [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1668.488593] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.488825] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.509916] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1668.710501] env[62813]: DEBUG oslo_vmware.rw_handles [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e4968733-3214-43c3-a8fd-51743fa72615/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1668.769796] env[62813]: DEBUG oslo_vmware.rw_handles [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1668.769985] env[62813]: DEBUG oslo_vmware.rw_handles [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e4968733-3214-43c3-a8fd-51743fa72615/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1668.791152] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6e4159-2e28-40e6-963d-8be96dec11a7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.799288] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134b180e-91a1-44ca-a106-22bc9b410cf7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.829268] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb9bb69-675d-4b0c-8048-5e8c8c890da1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.836733] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c42917b-306a-4428-9afb-25ff885a1828 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.849911] env[62813]: DEBUG nova.compute.provider_tree [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1668.860378] env[62813]: DEBUG nova.scheduler.client.report [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1668.877620] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.388s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.877620] env[62813]: ERROR nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1668.877620] env[62813]: Faults: ['InvalidArgument'] [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Traceback (most recent call last): [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 
5655255a-1d03-4854-b8ad-d77643f9b9c6] self.driver.spawn(context, instance, image_meta, [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] self._fetch_image_if_missing(context, vi) [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] image_cache(vi, tmp_image_ds_loc) [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] vm_util.copy_virtual_disk( [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] session._wait_for_task(vmdk_copy_task) [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] return self.wait_for_task(task_ref) [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] return evt.wait() [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] result = hub.switch() [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] return self.greenlet.switch() [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] self.f(*self.args, **self.kw) [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] raise exceptions.translate_fault(task_info.error) [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Faults: ['InvalidArgument'] [ 1668.877620] env[62813]: ERROR nova.compute.manager [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] [ 1668.878823] env[62813]: DEBUG nova.compute.utils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1668.879617] env[62813]: DEBUG nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Build of instance 5655255a-1d03-4854-b8ad-d77643f9b9c6 was re-scheduled: A specified parameter was not correct: fileType [ 1668.879617] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1668.880055] env[62813]: DEBUG nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1668.880243] env[62813]: DEBUG nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1668.880431] env[62813]: DEBUG nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1668.880620] env[62813]: DEBUG nova.network.neutron [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1669.355378] env[62813]: DEBUG nova.network.neutron [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.370829] env[62813]: INFO nova.compute.manager [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Took 0.49 seconds to deallocate network for instance. [ 1669.479675] env[62813]: INFO nova.scheduler.client.report [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Deleted allocations for instance 5655255a-1d03-4854-b8ad-d77643f9b9c6 [ 1669.502256] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2402d9c7-2c1e-4fa0-8e89-19e22d6cb99c tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "5655255a-1d03-4854-b8ad-d77643f9b9c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 634.209s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.503472] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e4ca4bb4-2199-4ed2-8535-893d4a3cbfbe tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "5655255a-1d03-4854-b8ad-d77643f9b9c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 437.959s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.503704] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e4ca4bb4-2199-4ed2-8535-893d4a3cbfbe tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "5655255a-1d03-4854-b8ad-d77643f9b9c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.503920] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e4ca4bb4-2199-4ed2-8535-893d4a3cbfbe tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "5655255a-1d03-4854-b8ad-d77643f9b9c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.504104] env[62813]: 
DEBUG oslo_concurrency.lockutils [None req-e4ca4bb4-2199-4ed2-8535-893d4a3cbfbe tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "5655255a-1d03-4854-b8ad-d77643f9b9c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.507377] env[62813]: INFO nova.compute.manager [None req-e4ca4bb4-2199-4ed2-8535-893d4a3cbfbe tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Terminating instance [ 1669.509325] env[62813]: DEBUG nova.compute.manager [None req-e4ca4bb4-2199-4ed2-8535-893d4a3cbfbe tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1669.509426] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-e4ca4bb4-2199-4ed2-8535-893d4a3cbfbe tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1669.509929] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f74c47b-1fc3-4d56-b353-223fcdb7d6bc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.519328] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fac1575-04d2-4321-bddf-4fb274065eb8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.530949] env[62813]: DEBUG nova.compute.manager [None req-3eb29875-9c29-496b-8288-90fc8c64784d tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] [instance: 401a154d-ff81-4c5d-9860-eae30f7a2171] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1669.553040] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-e4ca4bb4-2199-4ed2-8535-893d4a3cbfbe tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5655255a-1d03-4854-b8ad-d77643f9b9c6 could not be found. [ 1669.553316] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-e4ca4bb4-2199-4ed2-8535-893d4a3cbfbe tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1669.553448] env[62813]: INFO nova.compute.manager [None req-e4ca4bb4-2199-4ed2-8535-893d4a3cbfbe tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Took 0.04 seconds to destroy the instance on the hypervisor. 
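
The "Acquiring lock ...", "Lock ... acquired ... :: waited" and "\"released\" ... :: held" lines throughout this trace come from oslo.concurrency's synchronized decorator, which Nova uses to serialize per-instance operations such as build and terminate on a lock named after the instance UUID. A minimal sketch of that pattern follows; the decorated function body and the logging setup are placeholders for illustration, not Nova code.

    # Sketch of the oslo.concurrency lock pattern behind the lockutils DEBUG lines.
    import logging
    import time

    from oslo_concurrency import lockutils

    # Show the "Acquiring lock / acquired / released" DEBUG messages.
    logging.basicConfig(level=logging.DEBUG)

    # Nova prefixes its named locks; the lock name here is the instance UUID.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('5655255a-1d03-4854-b8ad-d77643f9b9c6')
    def do_terminate_instance():
        # Critical section: only one caller holding this lock name runs at a
        # time; lockutils logs how long it waited for and held the lock.
        time.sleep(0.2)

    do_terminate_instance()
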
[ 1669.553709] env[62813]: DEBUG oslo.service.loopingcall [None req-e4ca4bb4-2199-4ed2-8535-893d4a3cbfbe tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1669.554982] env[62813]: DEBUG nova.compute.manager [-] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1669.555106] env[62813]: DEBUG nova.network.neutron [-] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1669.557190] env[62813]: DEBUG nova.compute.manager [None req-3eb29875-9c29-496b-8288-90fc8c64784d tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] [instance: 401a154d-ff81-4c5d-9860-eae30f7a2171] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1669.583546] env[62813]: DEBUG nova.network.neutron [-] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.590158] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3eb29875-9c29-496b-8288-90fc8c64784d tempest-ServerDiskConfigTestJSON-1991547234 tempest-ServerDiskConfigTestJSON-1991547234-project-member] Lock "401a154d-ff81-4c5d-9860-eae30f7a2171" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.134s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.594265] env[62813]: INFO nova.compute.manager [-] [instance: 5655255a-1d03-4854-b8ad-d77643f9b9c6] Took 0.04 seconds to deallocate network for instance. [ 1669.602217] env[62813]: DEBUG nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1669.658152] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.658424] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.660213] env[62813]: INFO nova.compute.claims [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1669.703472] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e4ca4bb4-2199-4ed2-8535-893d4a3cbfbe tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "5655255a-1d03-4854-b8ad-d77643f9b9c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.200s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.893913] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7137a4b3-3c20-49c4-ae50-6136ff51972d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.902183] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bac096-d683-4940-8517-fa4fe5685200 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.933538] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3283f37b-b5c8-4cd3-bad1-6084ec740feb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.941606] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de6df4b-1cd9-4dbc-b4c1-8f0e54e28b75 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.955274] env[62813]: DEBUG nova.compute.provider_tree [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1669.964593] env[62813]: DEBUG nova.scheduler.client.report [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1669.978237] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.320s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.978753] env[62813]: DEBUG nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1670.012075] env[62813]: DEBUG nova.compute.utils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1670.013537] env[62813]: DEBUG nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1670.013749] env[62813]: DEBUG nova.network.neutron [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1670.022574] env[62813]: DEBUG nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Start building block device mappings for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1670.075990] env[62813]: DEBUG nova.policy [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d821f55e50c407d84b1c210c846e5b0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4d879519c9c46cd907a58be6fd316ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1670.091744] env[62813]: DEBUG nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1670.116949] env[62813]: DEBUG nova.virt.hardware [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1670.117214] env[62813]: DEBUG nova.virt.hardware [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1670.117372] env[62813]: DEBUG nova.virt.hardware [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1670.117595] env[62813]: DEBUG nova.virt.hardware [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1670.117752] env[62813]: DEBUG nova.virt.hardware [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1670.117903] env[62813]: DEBUG 
nova.virt.hardware [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1670.118130] env[62813]: DEBUG nova.virt.hardware [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1670.118293] env[62813]: DEBUG nova.virt.hardware [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1670.118473] env[62813]: DEBUG nova.virt.hardware [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1670.118662] env[62813]: DEBUG nova.virt.hardware [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1670.118842] env[62813]: DEBUG nova.virt.hardware [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1670.119733] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6546b8-7392-4bcd-a636-84650b11ff46 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.128308] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbccf1ce-ce42-42de-abfd-e4faf6b83f2a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.394661] env[62813]: DEBUG nova.network.neutron [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Successfully created port: 12fcf3c5-922d-4a45-979f-489f25d27d2c {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1671.179508] env[62813]: DEBUG nova.network.neutron [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Successfully updated port: 12fcf3c5-922d-4a45-979f-489f25d27d2c {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1671.224275] env[62813]: DEBUG oslo_concurrency.lockutils [None 
req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Acquiring lock "refresh_cache-0d095679-87c7-46f6-8869-42b0f22127e8" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.224275] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Acquired lock "refresh_cache-0d095679-87c7-46f6-8869-42b0f22127e8" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.224436] env[62813]: DEBUG nova.network.neutron [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1671.268880] env[62813]: DEBUG nova.network.neutron [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1671.415202] env[62813]: DEBUG nova.compute.manager [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Received event network-vif-plugged-12fcf3c5-922d-4a45-979f-489f25d27d2c {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1671.415501] env[62813]: DEBUG oslo_concurrency.lockutils [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] Acquiring lock "0d095679-87c7-46f6-8869-42b0f22127e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.415732] env[62813]: DEBUG oslo_concurrency.lockutils [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] Lock "0d095679-87c7-46f6-8869-42b0f22127e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.415901] env[62813]: DEBUG oslo_concurrency.lockutils [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] Lock "0d095679-87c7-46f6-8869-42b0f22127e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.416082] env[62813]: DEBUG nova.compute.manager [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] No waiting events found dispatching network-vif-plugged-12fcf3c5-922d-4a45-979f-489f25d27d2c {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1671.416249] env[62813]: WARNING nova.compute.manager [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] 
[instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Received unexpected event network-vif-plugged-12fcf3c5-922d-4a45-979f-489f25d27d2c for instance with vm_state building and task_state spawning. [ 1671.416405] env[62813]: DEBUG nova.compute.manager [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Received event network-changed-12fcf3c5-922d-4a45-979f-489f25d27d2c {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1671.416552] env[62813]: DEBUG nova.compute.manager [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Refreshing instance network info cache due to event network-changed-12fcf3c5-922d-4a45-979f-489f25d27d2c. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1671.416716] env[62813]: DEBUG oslo_concurrency.lockutils [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] Acquiring lock "refresh_cache-0d095679-87c7-46f6-8869-42b0f22127e8" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.532645] env[62813]: DEBUG nova.network.neutron [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Updating instance_info_cache with network_info: [{"id": "12fcf3c5-922d-4a45-979f-489f25d27d2c", "address": "fa:16:3e:1f:bf:e6", "network": {"id": "ea9c716c-766a-463f-94ea-71761f08b748", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-608845599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4d879519c9c46cd907a58be6fd316ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12fcf3c5-92", "ovs_interfaceid": "12fcf3c5-922d-4a45-979f-489f25d27d2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.546021] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Releasing lock "refresh_cache-0d095679-87c7-46f6-8869-42b0f22127e8" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.546332] env[62813]: DEBUG nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Instance network_info: |[{"id": "12fcf3c5-922d-4a45-979f-489f25d27d2c", 
"address": "fa:16:3e:1f:bf:e6", "network": {"id": "ea9c716c-766a-463f-94ea-71761f08b748", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-608845599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4d879519c9c46cd907a58be6fd316ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12fcf3c5-92", "ovs_interfaceid": "12fcf3c5-922d-4a45-979f-489f25d27d2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1671.546628] env[62813]: DEBUG oslo_concurrency.lockutils [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] Acquired lock "refresh_cache-0d095679-87c7-46f6-8869-42b0f22127e8" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.546811] env[62813]: DEBUG nova.network.neutron [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Refreshing network info cache for port 12fcf3c5-922d-4a45-979f-489f25d27d2c {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1671.547941] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:bf:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1195acd-707f-4bac-a99d-14db17a63802', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12fcf3c5-922d-4a45-979f-489f25d27d2c', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1671.555391] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Creating folder: Project (a4d879519c9c46cd907a58be6fd316ef). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1671.556672] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7a7f8f2-af26-4736-bf12-de9b429e4961 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.570913] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Created folder: Project (a4d879519c9c46cd907a58be6fd316ef) in parent group-v840812. 
[ 1671.570913] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Creating folder: Instances. Parent ref: group-v840907. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1671.571213] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-186eaf8d-ee55-4e1f-8ecf-785a37fc43e6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.581905] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Created folder: Instances in parent group-v840907. [ 1671.582182] env[62813]: DEBUG oslo.service.loopingcall [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1671.582379] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1671.582587] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1539bda3-c2d6-4f4e-9728-bcab490b8ee8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.605312] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1671.605312] env[62813]: value = "task-4267736" [ 1671.605312] env[62813]: _type = "Task" [ 1671.605312] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.613728] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267736, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.895755] env[62813]: DEBUG nova.network.neutron [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Updated VIF entry in instance network info cache for port 12fcf3c5-922d-4a45-979f-489f25d27d2c. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1671.896141] env[62813]: DEBUG nova.network.neutron [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Updating instance_info_cache with network_info: [{"id": "12fcf3c5-922d-4a45-979f-489f25d27d2c", "address": "fa:16:3e:1f:bf:e6", "network": {"id": "ea9c716c-766a-463f-94ea-71761f08b748", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-608845599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4d879519c9c46cd907a58be6fd316ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12fcf3c5-92", "ovs_interfaceid": "12fcf3c5-922d-4a45-979f-489f25d27d2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.909156] env[62813]: DEBUG oslo_concurrency.lockutils [req-cc794b4c-2993-48e2-ab16-14c2b0653928 req-329dab66-a508-4ab7-804b-82eb7e1ea022 service nova] Releasing lock "refresh_cache-0d095679-87c7-46f6-8869-42b0f22127e8" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.116914] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267736, 'name': CreateVM_Task, 'duration_secs': 0.318882} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.117161] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1672.117980] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.118168] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.118549] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1672.118874] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d838a09e-4759-4262-826f-b24771eca730 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.124536] env[62813]: DEBUG oslo_vmware.api [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Waiting for the task: (returnval){ [ 1672.124536] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]523421ec-2029-efac-56ca-6f7089756eab" [ 1672.124536] env[62813]: _type = "Task" [ 1672.124536] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.133076] env[62813]: DEBUG oslo_vmware.api [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]523421ec-2029-efac-56ca-6f7089756eab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.636031] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.636031] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1672.636437] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.165636] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1679.165636] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1681.164595] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1685.163838] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1686.165051] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1686.165051] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1686.165051] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1686.187269] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1686.187440] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1686.187598] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1686.187950] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1686.188140] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1686.188275] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1686.188401] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1686.188525] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1686.188685] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1686.188847] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1686.188976] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1687.163521] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1688.160291] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1690.163411] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1691.165053] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1691.176014] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.176297] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.176535] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.176727] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1691.177933] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3669be-494b-41c7-ad0b-732e496e804c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.186922] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e89f415-91d5-4976-bf26-3dec69923e79 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.200891] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ec2f23-9b17-4d61-bb85-c17635aff5e6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.207261] env[62813]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c316e7ec-028e-4218-9708-07660608705e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.237661] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180738MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1691.237855] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.237993] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.313026] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1691.313191] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1691.313324] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d40089e3-67b3-452e-a0d1-18d5def1ff34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1691.313457] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1691.313580] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9402929-e845-416b-91e5-39d08ab90a2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1691.313702] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d5f63ddc-e786-471d-a871-2ef878bd2455 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1691.313828] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 176d5151-358a-4b90-9aff-064aa9648618 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1691.314023] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 2c94570a-7bb0-4719-9982-0e7710470db1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1691.314118] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1691.314243] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0d095679-87c7-46f6-8869-42b0f22127e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1691.330310] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f7777e07-72df-4af1-8f22-ccb71db0e06a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1691.340655] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d43e416f-bdd1-49e1-aebd-838b319fc047 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1691.350616] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 63766a48-0d55-4261-9949-be3335ae8d0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1691.359994] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1691.360230] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1691.360378] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1691.537404] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1adb1402-2489-4de3-a70e-a233320122f0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.545919] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f9705b-a6cb-49e7-be9a-ab82d605ec5c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.577374] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72765fbf-7559-4fe5-9934-c8ea4a10b06e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.584948] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9d6d97-f56d-4c0f-b7f2-e262ff61534d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.597931] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1691.606998] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1691.621386] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1691.621574] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.384s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.135992] env[62813]: DEBUG oslo_concurrency.lockutils [None req-478eb40a-65ac-4951-a38a-541e64ddcd11 tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Acquiring lock "0d095679-87c7-46f6-8869-42b0f22127e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.622107] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1714.835112] env[62813]: WARNING oslo_vmware.rw_handles [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1714.835112] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1714.835112] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1714.835112] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1714.835112] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1714.835112] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1714.835112] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1714.835112] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1714.835112] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1714.835112] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1714.835112] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1714.835112] env[62813]: ERROR oslo_vmware.rw_handles [ 1714.835112] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/e4968733-3214-43c3-a8fd-51743fa72615/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1714.837214] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Caching image {{(pid=62813) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1714.837452] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Copying Virtual Disk [datastore2] vmware_temp/e4968733-3214-43c3-a8fd-51743fa72615/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/e4968733-3214-43c3-a8fd-51743fa72615/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1714.837760] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f51f515-13be-4cb2-9c33-13247e73611b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.846832] env[62813]: DEBUG oslo_vmware.api [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Waiting for the task: (returnval){ [ 1714.846832] env[62813]: value = "task-4267737" [ 1714.846832] env[62813]: _type = "Task" [ 1714.846832] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.855284] env[62813]: DEBUG oslo_vmware.api [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Task: {'id': task-4267737, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.357398] env[62813]: DEBUG oslo_vmware.exceptions [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1715.357639] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.358268] env[62813]: ERROR nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1715.358268] env[62813]: Faults: ['InvalidArgument'] [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Traceback (most recent call last): [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] yield resources [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] self.driver.spawn(context, instance, image_meta, [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] self._fetch_image_if_missing(context, vi) [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] image_cache(vi, tmp_image_ds_loc) [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] vm_util.copy_virtual_disk( [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] session._wait_for_task(vmdk_copy_task) [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] return self.wait_for_task(task_ref) [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] return evt.wait() [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] result = hub.switch() [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] return self.greenlet.switch() [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] self.f(*self.args, **self.kw) [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] raise exceptions.translate_fault(task_info.error) [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Faults: ['InvalidArgument'] [ 1715.358268] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] [ 1715.359330] env[62813]: INFO nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Terminating instance [ 1715.360195] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1715.360405] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1715.360643] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cdaa86f-3e37-4b3d-91a8-7cacaddfe2b5 
{{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.362979] env[62813]: DEBUG nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1715.363195] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1715.363902] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb366d0-df9b-403a-bb53-0615992614c4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.370819] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1715.371052] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c84afff8-87f1-4305-bf72-ffa3cbd20356 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.373217] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1715.373394] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1715.374359] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f06f5d7d-d72b-4a15-8b27-2786ced5f7cd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.379398] env[62813]: DEBUG oslo_vmware.api [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Waiting for the task: (returnval){ [ 1715.379398] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]521ce59b-16e5-3c0c-1294-6a35b8412eb2" [ 1715.379398] env[62813]: _type = "Task" [ 1715.379398] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.386544] env[62813]: DEBUG oslo_vmware.api [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]521ce59b-16e5-3c0c-1294-6a35b8412eb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.442919] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1715.443188] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1715.443369] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Deleting the datastore file [datastore2] 5d0e1cb1-9f54-4a76-960c-99d0803afd2e {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1715.443653] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fea07f21-b77c-4498-aa7d-9872e07a86b7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.451567] env[62813]: DEBUG oslo_vmware.api [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Waiting for the task: (returnval){ [ 1715.451567] env[62813]: value = "task-4267739" [ 1715.451567] env[62813]: _type = "Task" [ 1715.451567] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.460121] env[62813]: DEBUG oslo_vmware.api [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Task: {'id': task-4267739, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.891384] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1715.891789] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Creating directory with path [datastore2] vmware_temp/8f749882-4ba2-4e92-858e-4beccc20ad08/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1715.891836] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c92650d-61da-411b-9209-d49288da87b8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.903491] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Created directory with path [datastore2] vmware_temp/8f749882-4ba2-4e92-858e-4beccc20ad08/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1715.903641] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Fetch image to [datastore2] vmware_temp/8f749882-4ba2-4e92-858e-4beccc20ad08/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1715.903790] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/8f749882-4ba2-4e92-858e-4beccc20ad08/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1715.904531] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0a3510-01bf-4859-9bab-13c089e15d92 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.912600] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd47facb-c53c-4b86-8477-0620b2c190a9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.922373] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85558bc0-4949-4f67-a35f-2cc92d9af6b3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.956960] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-89349d41-0b54-47c1-bca8-3096cdaef161 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.964829] env[62813]: DEBUG oslo_vmware.api [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Task: {'id': task-4267739, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065191} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.966499] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1715.966695] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1715.966872] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1715.967063] env[62813]: INFO nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Took 0.60 seconds to destroy the instance on the hypervisor. 
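Every *_Task invocation in this log (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) is driven by the same polling helper: the caller invokes the vSphere method, receives a task reference, and hands it to the session, which re-reads the task's progress (the "progress is 0%" lines) until it completes or its fault is translated and raised, as happened for the CopyVirtualDisk InvalidArgument failure. A hedged sketch of that pattern, assuming session is an oslo.vmware VMwareAPISession like the placeholder sketched earlier and with the datastore path and datacenter reference left as caller-supplied values:

    def delete_datastore_file(session, ds_path, datacenter_ref):
        """Delete a datastore file and block until the vCenter task finishes."""
        # The FileManager managed object hangs off the vim service content.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=datacenter_ref)
        # wait_for_task polls the task and raises the translated
        # oslo_vmware.exceptions fault on failure; on success it returns the
        # completed task info (with duration_secs, as logged above).
        return session.wait_for_task(task)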
[ 1715.968921] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f78168b5-d6f8-4d25-a837-b14e3dbb9b05 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.970885] env[62813]: DEBUG nova.compute.claims [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1715.971064] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.971276] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.994293] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1716.112335] env[62813]: DEBUG oslo_vmware.rw_handles [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8f749882-4ba2-4e92-858e-4beccc20ad08/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1716.172614] env[62813]: DEBUG oslo_vmware.rw_handles [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1716.172806] env[62813]: DEBUG oslo_vmware.rw_handles [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8f749882-4ba2-4e92-858e-4beccc20ad08/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1716.249414] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9b8fe6-1ecc-40ef-96e6-1666d2ed38d7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.257698] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfdfbafa-ad02-48d9-996f-1e59ee76857f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.288274] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a660a93-7728-49d2-b990-4c82b3f85f48 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.295832] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541f73ce-62a6-4b22-91af-bbdd1f1b205e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.309458] env[62813]: DEBUG nova.compute.provider_tree [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1716.318404] env[62813]: DEBUG nova.scheduler.client.report [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1716.332702] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.361s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.333289] env[62813]: ERROR nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1716.333289] env[62813]: Faults: ['InvalidArgument'] [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Traceback (most recent call last): [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1716.333289] env[62813]: 
ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] self.driver.spawn(context, instance, image_meta, [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] self._fetch_image_if_missing(context, vi) [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] image_cache(vi, tmp_image_ds_loc) [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] vm_util.copy_virtual_disk( [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] session._wait_for_task(vmdk_copy_task) [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] return self.wait_for_task(task_ref) [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] return evt.wait() [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] result = hub.switch() [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] return self.greenlet.switch() [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] self.f(*self.args, **self.kw) [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] raise exceptions.translate_fault(task_info.error) [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Faults: ['InvalidArgument'] [ 1716.333289] env[62813]: ERROR nova.compute.manager [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] [ 1716.334217] env[62813]: DEBUG nova.compute.utils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1716.335689] env[62813]: DEBUG nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Build of instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e was re-scheduled: A specified parameter was not correct: fileType [ 1716.335689] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1716.336094] env[62813]: DEBUG nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1716.336269] env[62813]: DEBUG nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1716.336440] env[62813]: DEBUG nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1716.336598] env[62813]: DEBUG nova.network.neutron [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1716.698975] env[62813]: DEBUG nova.network.neutron [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.710664] env[62813]: INFO nova.compute.manager [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Took 0.37 seconds to deallocate network for instance. [ 1716.817704] env[62813]: INFO nova.scheduler.client.report [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Deleted allocations for instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e [ 1716.845416] env[62813]: DEBUG oslo_concurrency.lockutils [None req-87f9260b-71ac-4e54-9c01-838cf9ae4496 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Lock "5d0e1cb1-9f54-4a76-960c-99d0803afd2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 620.260s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.846796] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d2afbddd-88fa-429b-b83b-41a6b35ea078 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Lock "5d0e1cb1-9f54-4a76-960c-99d0803afd2e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 424.326s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.847096] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d2afbddd-88fa-429b-b83b-41a6b35ea078 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Acquiring lock "5d0e1cb1-9f54-4a76-960c-99d0803afd2e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.847402] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d2afbddd-88fa-429b-b83b-41a6b35ea078 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Lock "5d0e1cb1-9f54-4a76-960c-99d0803afd2e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.847622] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d2afbddd-88fa-429b-b83b-41a6b35ea078 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Lock "5d0e1cb1-9f54-4a76-960c-99d0803afd2e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.850101] env[62813]: INFO nova.compute.manager [None req-d2afbddd-88fa-429b-b83b-41a6b35ea078 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Terminating instance [ 1716.852338] env[62813]: DEBUG nova.compute.manager [None req-d2afbddd-88fa-429b-b83b-41a6b35ea078 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1716.852582] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d2afbddd-88fa-429b-b83b-41a6b35ea078 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1716.852877] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11050e24-2600-48b9-b422-7d1e99561d0f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.863013] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4974215a-5b9f-403f-a9ef-4387d462d199 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.875037] env[62813]: DEBUG nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1716.898755] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-d2afbddd-88fa-429b-b83b-41a6b35ea078 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5d0e1cb1-9f54-4a76-960c-99d0803afd2e could not be found. [ 1716.899102] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d2afbddd-88fa-429b-b83b-41a6b35ea078 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1716.899208] env[62813]: INFO nova.compute.manager [None req-d2afbddd-88fa-429b-b83b-41a6b35ea078 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1716.899449] env[62813]: DEBUG oslo.service.loopingcall [None req-d2afbddd-88fa-429b-b83b-41a6b35ea078 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.899695] env[62813]: DEBUG nova.compute.manager [-] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1716.899821] env[62813]: DEBUG nova.network.neutron [-] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1716.930595] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.931063] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.932641] env[62813]: INFO nova.compute.claims [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1716.935574] env[62813]: DEBUG nova.network.neutron [-] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.944331] env[62813]: INFO nova.compute.manager [-] [instance: 5d0e1cb1-9f54-4a76-960c-99d0803afd2e] Took 0.04 seconds to deallocate network for instance. 
[ 1717.057175] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d2afbddd-88fa-429b-b83b-41a6b35ea078 tempest-VolumesAdminNegativeTest-18192477 tempest-VolumesAdminNegativeTest-18192477-project-member] Lock "5d0e1cb1-9f54-4a76-960c-99d0803afd2e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.210s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.160379] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac09653-d058-4052-b22f-417b12d0ce41 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.169400] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff41b7e-0f1c-41f4-ad99-98e48c872758 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.200093] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3100b59c-ca94-4b45-8fea-2c124ccf081c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.208758] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74c9aaf-ec83-4f0a-9b39-f3b11063b7d2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.222406] env[62813]: DEBUG nova.compute.provider_tree [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1717.232053] env[62813]: DEBUG nova.scheduler.client.report [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1717.247174] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.316s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.247779] env[62813]: DEBUG nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Start building networks asynchronously for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1717.282711] env[62813]: DEBUG nova.compute.utils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1717.284250] env[62813]: DEBUG nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Not allocating networking since 'none' was specified. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1717.295838] env[62813]: DEBUG nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1717.360242] env[62813]: DEBUG nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1717.386661] env[62813]: DEBUG nova.virt.hardware [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1717.386938] env[62813]: DEBUG nova.virt.hardware [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1717.387126] env[62813]: DEBUG nova.virt.hardware [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1717.387318] env[62813]: DEBUG nova.virt.hardware [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1717.387483] env[62813]: DEBUG nova.virt.hardware [None 
req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1717.387664] env[62813]: DEBUG nova.virt.hardware [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1717.388093] env[62813]: DEBUG nova.virt.hardware [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1717.388153] env[62813]: DEBUG nova.virt.hardware [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1717.388338] env[62813]: DEBUG nova.virt.hardware [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1717.388505] env[62813]: DEBUG nova.virt.hardware [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1717.388687] env[62813]: DEBUG nova.virt.hardware [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1717.389760] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2627c5-e7cd-40d0-b893-31bbac81d688 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.397811] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2535ca-7a37-40ec-bdb8-97976741c55e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.411511] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Instance VIF info [] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1717.417077] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Creating folder: Project (604e2298da994ee78f0ba314e88f3908). Parent ref: group-v840812. 
{{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1717.417375] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c9607c3-ac09-4ba8-9b01-969e1dbfb904 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.427443] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Created folder: Project (604e2298da994ee78f0ba314e88f3908) in parent group-v840812. [ 1717.427623] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Creating folder: Instances. Parent ref: group-v840910. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1717.427849] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4937c87-be6e-43ca-9ecb-f09be991b966 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.437109] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Created folder: Instances in parent group-v840910. [ 1717.437345] env[62813]: DEBUG oslo.service.loopingcall [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1717.437548] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1717.437747] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8ddc88b-ef2d-4e8f-b0a7-097f7c00e996 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.453960] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1717.453960] env[62813]: value = "task-4267742" [ 1717.453960] env[62813]: _type = "Task" [ 1717.453960] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.461432] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267742, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.964452] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267742, 'name': CreateVM_Task, 'duration_secs': 0.284268} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.964731] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1717.964905] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.965079] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.965418] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1717.965664] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f60c4ac1-dc7f-433d-9314-c3738cb0481a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.970867] env[62813]: DEBUG oslo_vmware.api [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Waiting for the task: (returnval){ [ 1717.970867] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5291f20c-89d9-3076-464d-cbe04c92e500" [ 1717.970867] env[62813]: _type = "Task" [ 1717.970867] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.979246] env[62813]: DEBUG oslo_vmware.api [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5291f20c-89d9-3076-464d-cbe04c92e500, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.483194] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.483457] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1718.483677] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.372260] env[62813]: DEBUG oslo_concurrency.lockutils [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquiring lock "f7777e07-72df-4af1-8f22-ccb71db0e06a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.164624] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1736.164884] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Cleaning up deleted instances {{(pid=62813) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 1736.176934] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] There are 0 instances to clean {{(pid=62813) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 1736.454652] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Acquiring lock "7f344eb3-b1a2-454f-a647-2d9ec7da915f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.454652] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Lock "7f344eb3-b1a2-454f-a647-2d9ec7da915f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.176601] env[62813]: DEBUG oslo_service.periodic_task [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1739.176971] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1742.165626] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.163929] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.164105] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Cleaning up deleted instances with incomplete migration {{(pid=62813) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 1745.175934] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1747.163870] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1747.164284] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1747.164284] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1747.186647] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1747.186824] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1747.186959] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1747.187100] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1747.187228] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1747.187355] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1747.187513] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1747.187641] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1747.187760] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1747.187877] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1747.187996] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1748.541709] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1748.564547] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Getting list of instances from cluster (obj){ [ 1748.564547] env[62813]: value = "domain-c8" [ 1748.564547] env[62813]: _type = "ClusterComputeResource" [ 1748.564547] env[62813]: } {{(pid=62813) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1748.566496] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8713d76c-7192-4b78-af80-f8c78763801e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.583775] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Got total of 10 instances {{(pid=62813) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1748.583962] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid b50a8094-fc39-420f-a1d0-a29b5ee29df2 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1748.584172] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid d40089e3-67b3-452e-a0d1-18d5def1ff34 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1748.584335] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1748.584495] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid c9402929-e845-416b-91e5-39d08ab90a2e {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1748.584649] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid d5f63ddc-e786-471d-a871-2ef878bd2455 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1748.584800] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 176d5151-358a-4b90-9aff-064aa9648618 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1748.584952] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 2c94570a-7bb0-4719-9982-0e7710470db1 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1748.585225] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid cd5b7232-5d47-43c6-874e-6f9e6b45f420 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1748.585405] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 0d095679-87c7-46f6-8869-42b0f22127e8 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 
1748.585559] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid f7777e07-72df-4af1-8f22-ccb71db0e06a {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1748.585901] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.586156] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "d40089e3-67b3-452e-a0d1-18d5def1ff34" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.586366] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.586573] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "c9402929-e845-416b-91e5-39d08ab90a2e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.586881] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "d5f63ddc-e786-471d-a871-2ef878bd2455" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.586957] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "176d5151-358a-4b90-9aff-064aa9648618" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.587141] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "2c94570a-7bb0-4719-9982-0e7710470db1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.587334] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.587529] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "0d095679-87c7-46f6-8869-42b0f22127e8" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.587723] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "f7777e07-72df-4af1-8f22-ccb71db0e06a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.209746] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1750.160101] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1750.163704] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1751.164094] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1751.176764] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.177023] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.177206] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.177366] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1751.178555] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa65e0e-420b-42a2-9044-3f4b209487f4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.187579] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-34b17581-b8a3-4855-b8d5-09f45df7cdbb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.203184] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e2ffea-528f-4ee7-b060-d02c4610f2f4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.210071] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c93f66-ee18-499e-b63c-66cf09ed221d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.239408] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180725MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1751.239546] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.239707] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.347588] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.347755] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d40089e3-67b3-452e-a0d1-18d5def1ff34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.347886] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.348025] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9402929-e845-416b-91e5-39d08ab90a2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.348161] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d5f63ddc-e786-471d-a871-2ef878bd2455 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.348343] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 176d5151-358a-4b90-9aff-064aa9648618 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.348483] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 2c94570a-7bb0-4719-9982-0e7710470db1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.348604] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.348722] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0d095679-87c7-46f6-8869-42b0f22127e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.348840] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f7777e07-72df-4af1-8f22-ccb71db0e06a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.361487] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 63766a48-0d55-4261-9949-be3335ae8d0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1751.373007] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1751.383660] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7f344eb3-b1a2-454f-a647-2d9ec7da915f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1751.383886] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1751.384050] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1751.400817] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing inventories for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1751.414790] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Updating ProviderTree inventory for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1751.414969] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Updating inventory in ProviderTree for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1751.426171] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing aggregate associations for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2, aggregates: None {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1751.445342] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing trait associations for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2, traits: 
COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1751.600114] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96f0f71-3b79-42c5-a0f0-afcfd1d4e6cc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.607716] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11395e23-d725-4802-8acf-a7484227e271 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.637632] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5662750-fb33-4a97-a6ad-b00804ad808b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.644987] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ae4b5a-f6ca-4b5f-aa28-a6ae8f8610ff {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.658481] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1751.667320] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1751.682197] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1751.682382] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.443s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.677733] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1756.163552] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1757.164644] env[62813]: 
DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1764.436777] env[62813]: WARNING oslo_vmware.rw_handles [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1764.436777] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1764.436777] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1764.436777] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1764.436777] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1764.436777] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1764.436777] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1764.436777] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1764.436777] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1764.436777] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1764.436777] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1764.436777] env[62813]: ERROR oslo_vmware.rw_handles [ 1764.437489] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/8f749882-4ba2-4e92-858e-4beccc20ad08/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1764.439466] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1764.439719] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Copying Virtual Disk [datastore2] vmware_temp/8f749882-4ba2-4e92-858e-4beccc20ad08/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/8f749882-4ba2-4e92-858e-4beccc20ad08/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1764.440017] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97028fa1-cae6-46e4-a297-6156a1faca93 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.448207] 
env[62813]: DEBUG oslo_vmware.api [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Waiting for the task: (returnval){ [ 1764.448207] env[62813]: value = "task-4267743" [ 1764.448207] env[62813]: _type = "Task" [ 1764.448207] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.456805] env[62813]: DEBUG oslo_vmware.api [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Task: {'id': task-4267743, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.958840] env[62813]: DEBUG oslo_vmware.exceptions [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1764.959150] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.959787] env[62813]: ERROR nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1764.959787] env[62813]: Faults: ['InvalidArgument'] [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Traceback (most recent call last): [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] yield resources [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] self.driver.spawn(context, instance, image_meta, [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] self._fetch_image_if_missing(context, vi) [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: 
b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] image_cache(vi, tmp_image_ds_loc) [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] vm_util.copy_virtual_disk( [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] session._wait_for_task(vmdk_copy_task) [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] return self.wait_for_task(task_ref) [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] return evt.wait() [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] result = hub.switch() [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] return self.greenlet.switch() [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] self.f(*self.args, **self.kw) [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] raise exceptions.translate_fault(task_info.error) [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Faults: ['InvalidArgument'] [ 1764.959787] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] [ 1764.961071] env[62813]: INFO nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 
tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Terminating instance [ 1764.961798] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.962014] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1764.962270] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d27f10e3-df8e-4f60-85fb-d407e18aa7e5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.964664] env[62813]: DEBUG nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1764.964862] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1764.965622] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da855628-5024-496c-8390-6e6ca49db8c5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.973286] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1764.973561] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98671e59-696e-415c-93b3-9786de054cd4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.976092] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1764.976308] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1764.977369] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-060abac9-f6b1-4438-a363-98cb3691e494 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.983499] env[62813]: DEBUG oslo_vmware.api [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Waiting for the task: (returnval){ [ 1764.983499] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52b71109-ee83-8016-bdbb-79692c2a6484" [ 1764.983499] env[62813]: _type = "Task" [ 1764.983499] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.992282] env[62813]: DEBUG oslo_vmware.api [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52b71109-ee83-8016-bdbb-79692c2a6484, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.049218] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1765.049576] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1765.049783] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Deleting the datastore file [datastore2] b50a8094-fc39-420f-a1d0-a29b5ee29df2 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1765.050115] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c7f4eb0-ec60-48aa-89ad-db2268cfd5c4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.056970] env[62813]: DEBUG oslo_vmware.api [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Waiting for the task: (returnval){ [ 1765.056970] env[62813]: value = "task-4267745" [ 1765.056970] env[62813]: _type = "Task" [ 1765.056970] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.066279] env[62813]: DEBUG oslo_vmware.api [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Task: {'id': task-4267745, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.494074] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1765.494359] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Creating directory with path [datastore2] vmware_temp/32a0774a-1b53-4279-8f8f-4077bdccc2e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1765.494629] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ff1cd17-503d-44fd-981a-8f0d9cc4d50d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.506297] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Created directory with path [datastore2] vmware_temp/32a0774a-1b53-4279-8f8f-4077bdccc2e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1765.506520] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Fetch image to [datastore2] vmware_temp/32a0774a-1b53-4279-8f8f-4077bdccc2e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1765.506703] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/32a0774a-1b53-4279-8f8f-4077bdccc2e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1765.507502] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee60c9f-6665-4082-bbb1-9089ab163607 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.514809] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe322bb5-6e54-4d4c-95cd-e14d7d43bd67 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.524187] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048c732e-b3b2-439f-a1ad-c893dacff031 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.555288] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ae2130d0-ced7-4ee7-b4dc-5f4c3f03c432 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.567022] env[62813]: DEBUG oslo_vmware.api [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Task: {'id': task-4267745, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077091} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.567238] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7304a957-cc69-4c9a-8fc4-66dc6fe2a631 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.568952] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1765.569164] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1765.569388] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1765.569513] env[62813]: INFO nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1765.571671] env[62813]: DEBUG nova.compute.claims [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1765.571845] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.572078] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.594950] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1765.653680] env[62813]: DEBUG oslo_vmware.rw_handles [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/32a0774a-1b53-4279-8f8f-4077bdccc2e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1765.713400] env[62813]: DEBUG oslo_vmware.rw_handles [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1765.713743] env[62813]: DEBUG oslo_vmware.rw_handles [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/32a0774a-1b53-4279-8f8f-4077bdccc2e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1765.832765] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-015a31bb-8f1f-4eda-9f7e-3669436c960a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.840773] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0054b510-e82e-4448-bae0-5ea93cad04e9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.870691] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8027ba7-c029-4d62-8b74-93c330d759d2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.878352] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ab39a9-c0a0-4be7-b3c0-6cb79ca6b86b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.891673] env[62813]: DEBUG nova.compute.provider_tree [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1765.901881] env[62813]: DEBUG nova.scheduler.client.report [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1765.917463] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.345s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.918027] env[62813]: ERROR nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1765.918027] env[62813]: Faults: ['InvalidArgument'] [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Traceback (most recent call last): [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1765.918027] env[62813]: 
ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] self.driver.spawn(context, instance, image_meta, [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] self._fetch_image_if_missing(context, vi) [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] image_cache(vi, tmp_image_ds_loc) [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] vm_util.copy_virtual_disk( [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] session._wait_for_task(vmdk_copy_task) [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] return self.wait_for_task(task_ref) [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] return evt.wait() [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] result = hub.switch() [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] return self.greenlet.switch() [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] self.f(*self.args, **self.kw) [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] raise exceptions.translate_fault(task_info.error) [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Faults: ['InvalidArgument'] [ 1765.918027] env[62813]: ERROR nova.compute.manager [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] [ 1765.919046] env[62813]: DEBUG nova.compute.utils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1765.920429] env[62813]: DEBUG nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Build of instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 was re-scheduled: A specified parameter was not correct: fileType [ 1765.920429] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1765.920838] env[62813]: DEBUG nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1765.921031] env[62813]: DEBUG nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1765.921214] env[62813]: DEBUG nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1765.921378] env[62813]: DEBUG nova.network.neutron [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1766.297040] env[62813]: DEBUG nova.network.neutron [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.313339] env[62813]: INFO nova.compute.manager [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Took 0.39 seconds to deallocate network for instance. [ 1766.421592] env[62813]: INFO nova.scheduler.client.report [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Deleted allocations for instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 [ 1766.445254] env[62813]: DEBUG oslo_concurrency.lockutils [None req-73d82da1-86a8-4154-a02c-c4064ae4677e tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 625.863s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.446454] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f1ee70db-e49b-407c-9343-5878d4217fba tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 429.946s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.446693] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f1ee70db-e49b-407c-9343-5878d4217fba tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Acquiring lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.446958] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f1ee70db-e49b-407c-9343-5878d4217fba tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.447068] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f1ee70db-e49b-407c-9343-5878d4217fba tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.449117] env[62813]: INFO nova.compute.manager [None req-f1ee70db-e49b-407c-9343-5878d4217fba tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Terminating instance [ 1766.450801] env[62813]: DEBUG nova.compute.manager [None req-f1ee70db-e49b-407c-9343-5878d4217fba tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1766.451009] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ee70db-e49b-407c-9343-5878d4217fba tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1766.451489] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9424ca35-e4c6-47ad-bff4-f24fb37089ca {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.461558] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56072467-eea9-4e8f-ae45-3f615517a9f7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.475491] env[62813]: DEBUG nova.compute.manager [None req-4892e14c-74aa-487e-809f-6405e64ddb32 tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: d43e416f-bdd1-49e1-aebd-838b319fc047] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1766.495450] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-f1ee70db-e49b-407c-9343-5878d4217fba tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b50a8094-fc39-420f-a1d0-a29b5ee29df2 could not be found. [ 1766.495843] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ee70db-e49b-407c-9343-5878d4217fba tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1766.495843] env[62813]: INFO nova.compute.manager [None req-f1ee70db-e49b-407c-9343-5878d4217fba tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1766.496322] env[62813]: DEBUG oslo.service.loopingcall [None req-f1ee70db-e49b-407c-9343-5878d4217fba tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1766.496381] env[62813]: DEBUG nova.compute.manager [-] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1766.496594] env[62813]: DEBUG nova.network.neutron [-] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1766.505700] env[62813]: DEBUG nova.compute.manager [None req-4892e14c-74aa-487e-809f-6405e64ddb32 tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: d43e416f-bdd1-49e1-aebd-838b319fc047] Instance disappeared before build. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1766.524592] env[62813]: DEBUG nova.network.neutron [-] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.539799] env[62813]: INFO nova.compute.manager [-] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] Took 0.04 seconds to deallocate network for instance. [ 1766.544873] env[62813]: DEBUG oslo_concurrency.lockutils [None req-4892e14c-74aa-487e-809f-6405e64ddb32 tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Lock "d43e416f-bdd1-49e1-aebd-838b319fc047" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.696s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.554850] env[62813]: DEBUG nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1766.619950] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.619950] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.620854] env[62813]: INFO nova.compute.claims [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1766.678950] env[62813]: DEBUG oslo_concurrency.lockutils [None req-f1ee70db-e49b-407c-9343-5878d4217fba tempest-SecurityGroupsTestJSON-1233471394 tempest-SecurityGroupsTestJSON-1233471394-project-member] Lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.232s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.679962] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 18.094s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.680182] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: b50a8094-fc39-420f-a1d0-a29b5ee29df2] During sync_power_state the instance has a pending task (deleting). Skip. 
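The compute_resources claim above is checked against the provider inventory that the scheduler report client logs just below for provider 49efdf20-78bc-435f-a902-9cc99ed395f2. A small worked sketch in plain Python (not Nova code) of how such inventory fields are commonly combined into schedulable capacity, using the values from that report:

    # capacity = (total - reserved) * allocation_ratio, per resource class
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 405,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 405.0

which is why an m1.nano claim (1 VCPU, 128 MB RAM, 1 GB disk, per the flavor dumped further down) fits easily on this node.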
[ 1766.680368] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "b50a8094-fc39-420f-a1d0-a29b5ee29df2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.823211] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21d948b-e140-43b0-8c07-9d644bd31bc5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.831569] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c22921a-0f97-4fd8-a0b2-13f68b9b10f5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.862438] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abeca82f-9804-42e1-8bb4-6c7528a2248a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.869811] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a3fbe3-8142-4ccf-bb76-af1e7854578e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.882832] env[62813]: DEBUG nova.compute.provider_tree [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1766.893226] env[62813]: DEBUG nova.scheduler.client.report [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1766.928471] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.309s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.929032] env[62813]: DEBUG nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Start building networks asynchronously for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1766.968903] env[62813]: DEBUG nova.compute.utils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1766.970335] env[62813]: DEBUG nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Not allocating networking since 'none' was specified. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1766.981135] env[62813]: DEBUG nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1767.049160] env[62813]: DEBUG nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1767.076614] env[62813]: DEBUG nova.virt.hardware [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1767.076858] env[62813]: DEBUG nova.virt.hardware [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1767.077029] env[62813]: DEBUG nova.virt.hardware [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1767.077222] env[62813]: DEBUG nova.virt.hardware [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1767.077371] env[62813]: DEBUG nova.virt.hardware [None 
req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1767.077521] env[62813]: DEBUG nova.virt.hardware [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1767.077738] env[62813]: DEBUG nova.virt.hardware [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1767.077900] env[62813]: DEBUG nova.virt.hardware [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1767.078082] env[62813]: DEBUG nova.virt.hardware [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1767.078253] env[62813]: DEBUG nova.virt.hardware [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1767.078482] env[62813]: DEBUG nova.virt.hardware [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1767.079357] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d52f3b-0ee0-4ffb-b211-adfe65b0be14 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.087583] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab47a09-0867-4ff9-a832-ef9a5e5c2190 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.101103] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Instance VIF info [] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1767.106646] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Creating folder: Project (849d14857c1f45909b71dacb35ddd4a2). Parent ref: group-v840812. 
{{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1767.106906] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b219c884-22ad-4a16-a401-87356e97b311 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.115986] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Created folder: Project (849d14857c1f45909b71dacb35ddd4a2) in parent group-v840812. [ 1767.116213] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Creating folder: Instances. Parent ref: group-v840913. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1767.116453] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30fd0b46-8b37-4bdd-9206-fd5cc4371a31 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.124663] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Created folder: Instances in parent group-v840913. [ 1767.124923] env[62813]: DEBUG oslo.service.loopingcall [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1767.125141] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1767.125359] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9985f85-9037-43a1-b27f-07751f27cfb1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.143076] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1767.143076] env[62813]: value = "task-4267748" [ 1767.143076] env[62813]: _type = "Task" [ 1767.143076] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.151606] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267748, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.654091] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267748, 'name': CreateVM_Task, 'duration_secs': 0.277822} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.654445] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1767.654705] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.654869] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.655234] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1767.655520] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8b63cba-72cb-4bb7-b254-b9b0f08e4123 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.660485] env[62813]: DEBUG oslo_vmware.api [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Waiting for the task: (returnval){ [ 1767.660485] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5256a5e5-59a3-2420-4fe4-9b968c5ca786" [ 1767.660485] env[62813]: _type = "Task" [ 1767.660485] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.668312] env[62813]: DEBUG oslo_vmware.api [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5256a5e5-59a3-2420-4fe4-9b968c5ca786, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.171261] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.171529] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1768.171741] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.730809] env[62813]: DEBUG oslo_concurrency.lockutils [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquiring lock "63766a48-0d55-4261-9949-be3335ae8d0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.998562] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "dcc68892-3e75-4da9-975a-5b41c69205f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.998562] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Lock "dcc68892-3e75-4da9-975a-5b41c69205f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.172324] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1800.172698] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1803.164403] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1805.163607] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1807.165616] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1807.165964] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1807.165964] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1807.188587] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1807.188812] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1807.188902] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1807.189039] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1807.189177] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1807.189299] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1807.189421] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1807.189542] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1807.189661] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1807.189781] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1807.189909] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1810.164918] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1811.164270] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1811.176873] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.177269] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.177320] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.177468] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1811.178632] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02440e20-945a-45e2-a402-8656a3ddbdd6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.187685] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da4a263-bfbc-48f0-ab2d-af9be141eac1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.201713] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71d08e8-48d8-41ce-896e-b6dd3e9f8b7f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.208255] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3620cec-97f5-46e8-8b79-4d019f165c3f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.238670] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180766MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1811.238890] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.239061] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.312657] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d40089e3-67b3-452e-a0d1-18d5def1ff34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1811.312808] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1811.312935] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9402929-e845-416b-91e5-39d08ab90a2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1811.313075] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d5f63ddc-e786-471d-a871-2ef878bd2455 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1811.313197] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 176d5151-358a-4b90-9aff-064aa9648618 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1811.313317] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 2c94570a-7bb0-4719-9982-0e7710470db1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1811.313436] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1811.313553] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0d095679-87c7-46f6-8869-42b0f22127e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1811.313669] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f7777e07-72df-4af1-8f22-ccb71db0e06a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1811.313783] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 63766a48-0d55-4261-9949-be3335ae8d0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1811.324826] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1811.335693] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7f344eb3-b1a2-454f-a647-2d9ec7da915f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1811.345667] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance dcc68892-3e75-4da9-975a-5b41c69205f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1811.345901] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1811.346081] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1811.498124] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d4b6cc-322c-4caf-8626-2980fe6e063e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.506229] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41d24cb-d35f-4cd8-8645-12429c25e49d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.535470] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8969756a-a4fe-4032-9521-b4478cb4a0c8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.543487] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d9791b-c287-4015-a602-ac9c94bc3b18 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.557916] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1811.566939] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1811.580759] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1811.580955] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.342s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.576871] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1812.577257] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1814.872062] env[62813]: WARNING oslo_vmware.rw_handles [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1814.872062] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1814.872062] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1814.872062] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1814.872062] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1814.872062] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1814.872062] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1814.872062] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1814.872062] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1814.872062] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1814.872062] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1814.872062] env[62813]: ERROR oslo_vmware.rw_handles [ 1814.872807] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/32a0774a-1b53-4279-8f8f-4077bdccc2e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1814.874703] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1814.875029] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Copying Virtual Disk [datastore2] vmware_temp/32a0774a-1b53-4279-8f8f-4077bdccc2e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/32a0774a-1b53-4279-8f8f-4077bdccc2e7/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1814.875259] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82992c39-727b-44c9-977c-daf001c93286 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.884308] env[62813]: DEBUG oslo_vmware.api [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Waiting for the task: (returnval){ [ 1814.884308] env[62813]: value = "task-4267749" [ 1814.884308] env[62813]: _type = "Task" [ 1814.884308] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.893027] env[62813]: DEBUG oslo_vmware.api [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Task: {'id': task-4267749, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.394657] env[62813]: DEBUG oslo_vmware.exceptions [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1815.394959] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.395547] env[62813]: ERROR nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1815.395547] env[62813]: Faults: ['InvalidArgument'] [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Traceback (most recent call last): [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] yield resources [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] self.driver.spawn(context, instance, image_meta, [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] self._fetch_image_if_missing(context, vi) [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] image_cache(vi, tmp_image_ds_loc) [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] vm_util.copy_virtual_disk( [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] session._wait_for_task(vmdk_copy_task) [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] return self.wait_for_task(task_ref) [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] return evt.wait() [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] result = hub.switch() [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] return self.greenlet.switch() [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] self.f(*self.args, **self.kw) [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] raise exceptions.translate_fault(task_info.error) [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Faults: ['InvalidArgument'] [ 1815.395547] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] [ 1815.396571] env[62813]: INFO nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Terminating instance [ 1815.398050] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.398050] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1815.398050] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a492f1b7-c074-4ffb-b7d5-b5fe058ecbd2 
{{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.400133] env[62813]: DEBUG nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1815.400328] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1815.401269] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fdb843-8461-470e-9680-39bace7437f9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.407910] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1815.408150] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56c8cec1-3e49-4410-9121-a34230e3b693 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.410359] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1815.410534] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1815.411493] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6348be39-fe3c-4780-bce5-114704d6f23e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.416271] env[62813]: DEBUG oslo_vmware.api [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for the task: (returnval){ [ 1815.416271] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52d913f6-fd58-9fa0-1570-8b68339b6d20" [ 1815.416271] env[62813]: _type = "Task" [ 1815.416271] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.424164] env[62813]: DEBUG oslo_vmware.api [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52d913f6-fd58-9fa0-1570-8b68339b6d20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.476062] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1815.476300] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1815.476473] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Deleting the datastore file [datastore2] d40089e3-67b3-452e-a0d1-18d5def1ff34 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1815.476751] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0725a0da-1362-45bb-9763-0c5196849b79 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.483404] env[62813]: DEBUG oslo_vmware.api [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Waiting for the task: (returnval){ [ 1815.483404] env[62813]: value = "task-4267751" [ 1815.483404] env[62813]: _type = "Task" [ 1815.483404] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.491138] env[62813]: DEBUG oslo_vmware.api [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Task: {'id': task-4267751, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.927993] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1815.928393] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Creating directory with path [datastore2] vmware_temp/59972425-5e2c-45b8-87ff-da75289f0da9/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1815.928567] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c98961a-8ce5-4163-ba40-273d2debcf04 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.940984] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Created directory with path [datastore2] vmware_temp/59972425-5e2c-45b8-87ff-da75289f0da9/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1815.942054] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Fetch image to [datastore2] vmware_temp/59972425-5e2c-45b8-87ff-da75289f0da9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1815.942054] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/59972425-5e2c-45b8-87ff-da75289f0da9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1815.943162] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129cd2a2-cc37-4dc2-86c7-6e682484a97b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.949581] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3adc6127-41cc-4b6a-acbc-9d18d935212e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.959152] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6348f9d0-85bf-4f44-8677-cf510f7ca81c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.994015] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aeb693c-21b6-4067-ac4b-9ffbf43917c9 
{{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.002835] env[62813]: DEBUG oslo_vmware.api [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Task: {'id': task-4267751, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069339} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.003379] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1816.003568] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1816.003740] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1816.003914] env[62813]: INFO nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Took 0.60 seconds to destroy the instance on the hypervisor. 
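Annotation: the SearchDatastore_Task and DeleteDatastoreFile_Task entries above follow oslo.vmware's task-wait pattern: the driver invokes a vSphere method that returns a Task managed object, then wait_for_task polls it (the "progress is 0%" lines) until it completes or raises a translated Vim fault. A minimal standalone sketch of that pattern, with a hypothetical vCenter endpoint, credentials, datastore path and datacenter reference (this is not Nova's code, just the oslo.vmware calls it relies on):

from oslo_vmware import api as vmware_api

# Hypothetical vCenter endpoint and credentials; this opens the SOAP session.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'demo-user', 'demo-secret',
    api_retry_count=3, task_poll_interval=0.5)

# Placeholder: in a real call this would be the datacenter's managed-object ref.
datacenter_ref = None

# FileManager.DeleteDatastoreFile_Task returns a Task moref; wait_for_task
# polls it and raises a translated VimFaultException if the task fails.
file_manager = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] example-dir/example-disk.vmdk',
    datacenter=datacenter_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # expected to be 'success' once the poll loop finishes
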
[ 1816.005468] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1fc747b9-1761-4700-80ed-d6789527ef01 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.007755] env[62813]: DEBUG nova.compute.claims [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1816.007755] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.007929] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.031288] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1816.090024] env[62813]: DEBUG oslo_vmware.rw_handles [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/59972425-5e2c-45b8-87ff-da75289f0da9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1816.148131] env[62813]: DEBUG oslo_vmware.rw_handles [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1816.148361] env[62813]: DEBUG oslo_vmware.rw_handles [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/59972425-5e2c-45b8-87ff-da75289f0da9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1816.273754] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e326a9b-201a-418d-ad13-eb6d810a50ba {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.282284] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec4381d-7e5f-4eb7-a089-9c36bc1a5bee {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.312184] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c02d1f-847c-4f73-bf59-0b32274d5a48 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.319653] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00dfbadf-b0fc-4b58-b88c-fe1471f6df8c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.333505] env[62813]: DEBUG nova.compute.provider_tree [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1816.341945] env[62813]: DEBUG nova.scheduler.client.report [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1816.355479] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.347s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.356029] env[62813]: ERROR nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1816.356029] env[62813]: Faults: ['InvalidArgument'] [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Traceback (most recent call last): [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 
1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] self.driver.spawn(context, instance, image_meta, [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] self._fetch_image_if_missing(context, vi) [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] image_cache(vi, tmp_image_ds_loc) [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] vm_util.copy_virtual_disk( [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] session._wait_for_task(vmdk_copy_task) [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] return self.wait_for_task(task_ref) [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] return evt.wait() [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] result = hub.switch() [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] return self.greenlet.switch() [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] self.f(*self.args, **self.kw) [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] raise exceptions.translate_fault(task_info.error) [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Faults: ['InvalidArgument'] [ 1816.356029] env[62813]: ERROR nova.compute.manager [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] [ 1816.356866] env[62813]: DEBUG nova.compute.utils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1816.358797] env[62813]: DEBUG nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Build of instance d40089e3-67b3-452e-a0d1-18d5def1ff34 was re-scheduled: A specified parameter was not correct: fileType [ 1816.358797] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1816.359229] env[62813]: DEBUG nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1816.359412] env[62813]: DEBUG nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1816.359586] env[62813]: DEBUG nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1816.359751] env[62813]: DEBUG nova.network.neutron [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1816.764595] env[62813]: DEBUG nova.network.neutron [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1816.777927] env[62813]: INFO nova.compute.manager [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Took 0.42 seconds to deallocate network for instance. [ 1816.885010] env[62813]: INFO nova.scheduler.client.report [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Deleted allocations for instance d40089e3-67b3-452e-a0d1-18d5def1ff34 [ 1816.906171] env[62813]: DEBUG oslo_concurrency.lockutils [None req-fa1c6f43-7654-4a92-ac76-d0bccd929bed tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "d40089e3-67b3-452e-a0d1-18d5def1ff34" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 626.549s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.907501] env[62813]: DEBUG oslo_concurrency.lockutils [None req-12b75fa2-c40a-4d84-9d3a-b0cd8eff949c tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "d40089e3-67b3-452e-a0d1-18d5def1ff34" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 429.343s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.907649] env[62813]: DEBUG oslo_concurrency.lockutils [None req-12b75fa2-c40a-4d84-9d3a-b0cd8eff949c tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Acquiring lock "d40089e3-67b3-452e-a0d1-18d5def1ff34-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.907905] env[62813]: DEBUG oslo_concurrency.lockutils [None req-12b75fa2-c40a-4d84-9d3a-b0cd8eff949c tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "d40089e3-67b3-452e-a0d1-18d5def1ff34-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.908581] env[62813]: DEBUG oslo_concurrency.lockutils [None req-12b75fa2-c40a-4d84-9d3a-b0cd8eff949c tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "d40089e3-67b3-452e-a0d1-18d5def1ff34-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.910486] env[62813]: INFO nova.compute.manager [None req-12b75fa2-c40a-4d84-9d3a-b0cd8eff949c tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Terminating instance [ 1816.912331] env[62813]: DEBUG nova.compute.manager [None req-12b75fa2-c40a-4d84-9d3a-b0cd8eff949c tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1816.912528] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-12b75fa2-c40a-4d84-9d3a-b0cd8eff949c tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1816.913296] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94647fc6-16c9-418c-a12c-f1c91553fd54 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.925130] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8436f6d5-3626-402e-b3b9-036bfd09d878 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.937215] env[62813]: DEBUG nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1816.961015] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-12b75fa2-c40a-4d84-9d3a-b0cd8eff949c tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d40089e3-67b3-452e-a0d1-18d5def1ff34 could not be found. 
[ 1816.961259] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-12b75fa2-c40a-4d84-9d3a-b0cd8eff949c tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1816.961446] env[62813]: INFO nova.compute.manager [None req-12b75fa2-c40a-4d84-9d3a-b0cd8eff949c tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1816.961710] env[62813]: DEBUG oslo.service.loopingcall [None req-12b75fa2-c40a-4d84-9d3a-b0cd8eff949c tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1816.961943] env[62813]: DEBUG nova.compute.manager [-] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1816.962055] env[62813]: DEBUG nova.network.neutron [-] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1816.988591] env[62813]: DEBUG nova.network.neutron [-] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1816.994226] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.994485] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.995968] env[62813]: INFO nova.compute.claims [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1817.000145] env[62813]: INFO nova.compute.manager [-] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] Took 0.04 seconds to deallocate network for instance. 
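Annotation: the "Waiting for function ... _deallocate_network_with_retries to return" entry above is oslo_service's loopingcall machinery: a worker function is invoked repeatedly until it raises LoopingCallDone (or times out), and the caller blocks on the returned event. Nova's helper uses a retrying/back-off variant; the sketch below uses the simpler FixedIntervalLoopingCall and a stand-in worker just to show the same control flow:

from oslo_service import loopingcall

state = {'attempts': 0}

def deallocate_with_retries():
    # Stand-in for the real deallocation step: pretend the first two
    # attempts do not finish, then signal completion on the third.
    state['attempts'] += 1
    if state['attempts'] < 3:
        return  # not done; the loop calls us again after `interval`
    raise loopingcall.LoopingCallDone(retvalue='deallocated')

timer = loopingcall.FixedIntervalLoopingCall(deallocate_with_retries)
result = timer.start(interval=0.5).wait()  # blocks until LoopingCallDone
print(result)  # 'deallocated'
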
[ 1817.101407] env[62813]: DEBUG oslo_concurrency.lockutils [None req-12b75fa2-c40a-4d84-9d3a-b0cd8eff949c tempest-ListServerFiltersTestJSON-1439601628 tempest-ListServerFiltersTestJSON-1439601628-project-member] Lock "d40089e3-67b3-452e-a0d1-18d5def1ff34" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.194s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.103560] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "d40089e3-67b3-452e-a0d1-18d5def1ff34" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 68.517s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.103788] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d40089e3-67b3-452e-a0d1-18d5def1ff34] During sync_power_state the instance has a pending task (deleting). Skip. [ 1817.103997] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "d40089e3-67b3-452e-a0d1-18d5def1ff34" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.163980] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1817.222469] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27eb7502-94ea-4286-ba57-7cdac6bcf004 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.232784] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d00716-7c33-47a7-999a-8ca7f85c1222 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.264867] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ddcb26-328f-46a9-8eda-12f886f66ca6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.272784] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3b19b5-8d24-4817-9f73-2c3dec189981 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.286227] env[62813]: DEBUG nova.compute.provider_tree [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1817.297161] env[62813]: DEBUG nova.scheduler.client.report [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 
based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1817.311854] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.317s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.312425] env[62813]: DEBUG nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1817.349780] env[62813]: DEBUG nova.compute.utils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1817.351653] env[62813]: DEBUG nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1817.351844] env[62813]: DEBUG nova.network.neutron [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1817.361703] env[62813]: DEBUG nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Start building block device mappings for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1817.411942] env[62813]: DEBUG nova.policy [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e577489b4e784e5abaa6a755ab08a2c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d2731f99cdc4553bd301f33c4df1517', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1817.433033] env[62813]: DEBUG nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1817.462745] env[62813]: DEBUG nova.virt.hardware [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1817.462993] env[62813]: DEBUG nova.virt.hardware [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1817.463176] env[62813]: DEBUG nova.virt.hardware [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1817.463365] env[62813]: DEBUG nova.virt.hardware [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1817.463513] env[62813]: DEBUG nova.virt.hardware [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1817.463661] env[62813]: DEBUG nova.virt.hardware [None 
req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1817.463878] env[62813]: DEBUG nova.virt.hardware [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1817.464047] env[62813]: DEBUG nova.virt.hardware [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1817.464286] env[62813]: DEBUG nova.virt.hardware [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1817.464485] env[62813]: DEBUG nova.virt.hardware [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1817.464669] env[62813]: DEBUG nova.virt.hardware [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1817.465637] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de019c7-70bc-405c-a515-aad43ca2f9e9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.474223] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47273b8-6d29-4ac4-8e6f-b66aad86e82b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.803183] env[62813]: DEBUG nova.network.neutron [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Successfully created port: 3a28b8e3-3734-4d5c-bfc9-33ff020055a2 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1818.473328] env[62813]: DEBUG nova.network.neutron [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Successfully updated port: 3a28b8e3-3734-4d5c-bfc9-33ff020055a2 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1818.485199] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 
tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "refresh_cache-1feb4a0d-0b0f-434e-91e9-321a48fb166c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.485381] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquired lock "refresh_cache-1feb4a0d-0b0f-434e-91e9-321a48fb166c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.485509] env[62813]: DEBUG nova.network.neutron [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1818.769736] env[62813]: DEBUG nova.network.neutron [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1818.829154] env[62813]: DEBUG nova.compute.manager [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Received event network-vif-plugged-3a28b8e3-3734-4d5c-bfc9-33ff020055a2 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1818.829431] env[62813]: DEBUG oslo_concurrency.lockutils [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] Acquiring lock "1feb4a0d-0b0f-434e-91e9-321a48fb166c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.829701] env[62813]: DEBUG oslo_concurrency.lockutils [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] Lock "1feb4a0d-0b0f-434e-91e9-321a48fb166c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.829930] env[62813]: DEBUG oslo_concurrency.lockutils [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] Lock "1feb4a0d-0b0f-434e-91e9-321a48fb166c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.830277] env[62813]: DEBUG nova.compute.manager [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] No waiting events found dispatching network-vif-plugged-3a28b8e3-3734-4d5c-bfc9-33ff020055a2 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1818.830557] env[62813]: WARNING nova.compute.manager [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Received unexpected event 
network-vif-plugged-3a28b8e3-3734-4d5c-bfc9-33ff020055a2 for instance with vm_state building and task_state spawning. [ 1818.830629] env[62813]: DEBUG nova.compute.manager [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Received event network-changed-3a28b8e3-3734-4d5c-bfc9-33ff020055a2 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1818.830808] env[62813]: DEBUG nova.compute.manager [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Refreshing instance network info cache due to event network-changed-3a28b8e3-3734-4d5c-bfc9-33ff020055a2. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1818.830950] env[62813]: DEBUG oslo_concurrency.lockutils [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] Acquiring lock "refresh_cache-1feb4a0d-0b0f-434e-91e9-321a48fb166c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.964191] env[62813]: DEBUG nova.network.neutron [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Updating instance_info_cache with network_info: [{"id": "3a28b8e3-3734-4d5c-bfc9-33ff020055a2", "address": "fa:16:3e:1c:4a:30", "network": {"id": "0f261672-1246-4a37-8b67-a9d24d314571", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-158054522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d2731f99cdc4553bd301f33c4df1517", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a28b8e3-37", "ovs_interfaceid": "3a28b8e3-3734-4d5c-bfc9-33ff020055a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.978977] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Releasing lock "refresh_cache-1feb4a0d-0b0f-434e-91e9-321a48fb166c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.979336] env[62813]: DEBUG nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Instance network_info: |[{"id": "3a28b8e3-3734-4d5c-bfc9-33ff020055a2", "address": "fa:16:3e:1c:4a:30", "network": {"id": "0f261672-1246-4a37-8b67-a9d24d314571", 
"bridge": "br-int", "label": "tempest-DeleteServersTestJSON-158054522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d2731f99cdc4553bd301f33c4df1517", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a28b8e3-37", "ovs_interfaceid": "3a28b8e3-3734-4d5c-bfc9-33ff020055a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1818.979788] env[62813]: DEBUG oslo_concurrency.lockutils [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] Acquired lock "refresh_cache-1feb4a0d-0b0f-434e-91e9-321a48fb166c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.980020] env[62813]: DEBUG nova.network.neutron [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Refreshing network info cache for port 3a28b8e3-3734-4d5c-bfc9-33ff020055a2 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1818.981096] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:4a:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '489b2441-7132-4942-8b61-49cf0ad4400e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a28b8e3-3734-4d5c-bfc9-33ff020055a2', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1818.988548] env[62813]: DEBUG oslo.service.loopingcall [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1818.991939] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1818.992390] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00d0f528-effb-4a38-887e-ba0e45f2f1c3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.013231] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1819.013231] env[62813]: value = "task-4267752" [ 1819.013231] env[62813]: _type = "Task" [ 1819.013231] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.021117] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267752, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.351275] env[62813]: DEBUG nova.network.neutron [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Updated VIF entry in instance network info cache for port 3a28b8e3-3734-4d5c-bfc9-33ff020055a2. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1819.351647] env[62813]: DEBUG nova.network.neutron [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Updating instance_info_cache with network_info: [{"id": "3a28b8e3-3734-4d5c-bfc9-33ff020055a2", "address": "fa:16:3e:1c:4a:30", "network": {"id": "0f261672-1246-4a37-8b67-a9d24d314571", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-158054522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d2731f99cdc4553bd301f33c4df1517", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a28b8e3-37", "ovs_interfaceid": "3a28b8e3-3734-4d5c-bfc9-33ff020055a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1819.362032] env[62813]: DEBUG oslo_concurrency.lockutils [req-a5564f6f-0839-4aa2-91a3-5b4d93365d86 req-33be7044-9873-4842-9c2f-b16d9081226e service nova] Releasing lock "refresh_cache-1feb4a0d-0b0f-434e-91e9-321a48fb166c" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.523515] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267752, 'name': CreateVM_Task, 'duration_secs': 0.307132} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.523850] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1819.524331] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.524506] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.524855] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1819.525129] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1de4e04f-8af9-49bf-a376-05af5f4ad0d9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.530024] env[62813]: DEBUG oslo_vmware.api [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for the task: (returnval){ [ 1819.530024] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52bba5a7-61f1-fb7f-8ed9-7b7947993820" [ 1819.530024] env[62813]: _type = "Task" [ 1819.530024] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.538089] env[62813]: DEBUG oslo_vmware.api [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52bba5a7-61f1-fb7f-8ed9-7b7947993820, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.041481] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1820.041765] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1820.041985] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.827557] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.827927] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.928023] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b72e7f96-fd2c-42d9-9f0f-741a32852f4e tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "1feb4a0d-0b0f-434e-91e9-321a48fb166c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.952124] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Acquiring lock "769103f0-9ebd-4a7a-825f-bf7456cb6eb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.952548] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Lock "769103f0-9ebd-4a7a-825f-bf7456cb6eb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.164629] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1861.164934] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1864.473054] env[62813]: WARNING oslo_vmware.rw_handles [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1864.473054] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1864.473054] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1864.473054] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1864.473054] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1864.473054] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1864.473054] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1864.473054] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1864.473054] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1864.473054] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1864.473054] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1864.473054] env[62813]: ERROR oslo_vmware.rw_handles [ 1864.473749] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/59972425-5e2c-45b8-87ff-da75289f0da9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1864.476784] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1864.476896] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Copying Virtual Disk [datastore2] vmware_temp/59972425-5e2c-45b8-87ff-da75289f0da9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/59972425-5e2c-45b8-87ff-da75289f0da9/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) 
copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1864.477242] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3346c9de-db67-437a-94a9-fc019d5074fb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.490068] env[62813]: DEBUG oslo_vmware.api [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for the task: (returnval){ [ 1864.490068] env[62813]: value = "task-4267753" [ 1864.490068] env[62813]: _type = "Task" [ 1864.490068] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.497421] env[62813]: DEBUG oslo_vmware.api [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Task: {'id': task-4267753, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.998712] env[62813]: DEBUG oslo_vmware.exceptions [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1864.999020] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.999643] env[62813]: ERROR nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1864.999643] env[62813]: Faults: ['InvalidArgument'] [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Traceback (most recent call last): [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] yield resources [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] self.driver.spawn(context, instance, image_meta, [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1864.999643] env[62813]: ERROR 
nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] self._fetch_image_if_missing(context, vi) [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] image_cache(vi, tmp_image_ds_loc) [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] vm_util.copy_virtual_disk( [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] session._wait_for_task(vmdk_copy_task) [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] return self.wait_for_task(task_ref) [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] return evt.wait() [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] result = hub.switch() [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] return self.greenlet.switch() [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] self.f(*self.args, **self.kw) [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] raise exceptions.translate_fault(task_info.error) [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 
07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Faults: ['InvalidArgument'] [ 1864.999643] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] [ 1865.001225] env[62813]: INFO nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Terminating instance [ 1865.001659] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.001939] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1865.002216] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2845859b-1f1e-40fe-b4cb-83a89554d2d3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.004697] env[62813]: DEBUG nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Start destroying the instance on the hypervisor. 
{{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1865.004905] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1865.005642] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c5aff3-e5bd-4f78-bebc-faa9f6bb735a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.012502] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1865.012780] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-615ea256-3556-43d3-a4ec-d3aa1c6e3e52 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.014981] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1865.015180] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1865.016147] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ceeb62d-e0ea-4c13-bcab-f8c39f67d6b2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.021191] env[62813]: DEBUG oslo_vmware.api [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Waiting for the task: (returnval){ [ 1865.021191] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52e1062d-7f99-7066-5280-e4ea275a84c3" [ 1865.021191] env[62813]: _type = "Task" [ 1865.021191] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.028809] env[62813]: DEBUG oslo_vmware.api [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52e1062d-7f99-7066-5280-e4ea275a84c3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.094957] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1865.095355] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1865.095646] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Deleting the datastore file [datastore2] 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1865.096124] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-747f8096-73fd-4b25-b14d-ee30b0db7275 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.102972] env[62813]: DEBUG oslo_vmware.api [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for the task: (returnval){ [ 1865.102972] env[62813]: value = "task-4267755" [ 1865.102972] env[62813]: _type = "Task" [ 1865.102972] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.111175] env[62813]: DEBUG oslo_vmware.api [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Task: {'id': task-4267755, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.164400] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1865.164654] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1865.532118] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1865.532417] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Creating directory with path [datastore2] vmware_temp/7f43a86d-08fa-442b-89ad-8bbb5de82003/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1865.532640] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29a7866a-54ac-4845-8ca2-d7ce8bc48600 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.544739] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Created directory with path [datastore2] vmware_temp/7f43a86d-08fa-442b-89ad-8bbb5de82003/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1865.544961] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Fetch image to [datastore2] vmware_temp/7f43a86d-08fa-442b-89ad-8bbb5de82003/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1865.545170] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/7f43a86d-08fa-442b-89ad-8bbb5de82003/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1865.545956] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387edebe-72f9-4494-abcd-b6d0c87417f5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1865.553883] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04038e31-c22c-401d-aa21-3f62dd2e0a25 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.564584] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3781a303-e0e0-42e4-b0d7-68ec0182ecc0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.596619] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66bd9d9-1a21-426e-9ff1-60547308670f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.603432] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4c1e9de9-ec75-425d-b1ef-b0c4421bd099 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.612913] env[62813]: DEBUG oslo_vmware.api [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Task: {'id': task-4267755, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066386} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.612913] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1865.613152] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1865.613210] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1865.613402] env[62813]: INFO nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1865.615650] env[62813]: DEBUG nova.compute.claims [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1865.615822] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.616045] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.629086] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1865.687487] env[62813]: DEBUG oslo_vmware.rw_handles [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f43a86d-08fa-442b-89ad-8bbb5de82003/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1865.761841] env[62813]: DEBUG oslo_vmware.rw_handles [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1865.762093] env[62813]: DEBUG oslo_vmware.rw_handles [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f43a86d-08fa-442b-89ad-8bbb5de82003/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1865.901645] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657521c0-3437-4104-b11c-7cef585e3e68 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.909730] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9741bd6e-9fe6-4cf8-8353-dc48bbd56134 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.940705] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46732911-b35c-4486-aa87-afd3974eb1a8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.949675] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e35fac2-bd60-4217-a6c5-4f98f104a8a2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.963648] env[62813]: DEBUG nova.compute.provider_tree [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1865.972935] env[62813]: DEBUG nova.scheduler.client.report [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1865.987791] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.372s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.988340] env[62813]: ERROR nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1865.988340] env[62813]: Faults: ['InvalidArgument'] [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Traceback (most recent call last): [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1865.988340] env[62813]: ERROR 
nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] self.driver.spawn(context, instance, image_meta, [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] self._fetch_image_if_missing(context, vi) [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] image_cache(vi, tmp_image_ds_loc) [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] vm_util.copy_virtual_disk( [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] session._wait_for_task(vmdk_copy_task) [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] return self.wait_for_task(task_ref) [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] return evt.wait() [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] result = hub.switch() [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] return self.greenlet.switch() [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] self.f(*self.args, **self.kw) [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] raise exceptions.translate_fault(task_info.error) [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Faults: ['InvalidArgument'] [ 1865.988340] env[62813]: ERROR nova.compute.manager [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] [ 1865.989589] env[62813]: DEBUG nova.compute.utils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1865.990523] env[62813]: DEBUG nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Build of instance 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf was re-scheduled: A specified parameter was not correct: fileType [ 1865.990523] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1865.990913] env[62813]: DEBUG nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1865.991103] env[62813]: DEBUG nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1865.991286] env[62813]: DEBUG nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1865.991458] env[62813]: DEBUG nova.network.neutron [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1866.327789] env[62813]: DEBUG nova.network.neutron [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.339102] env[62813]: INFO nova.compute.manager [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Took 0.35 seconds to deallocate network for instance. [ 1866.433852] env[62813]: INFO nova.scheduler.client.report [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Deleted allocations for instance 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf [ 1866.460412] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8d5f00d9-8849-4f6a-8e01-bb2579b55c88 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 630.610s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.462078] env[62813]: DEBUG oslo_concurrency.lockutils [None req-875059c6-358b-4d2d-abe2-a567a1aaa8cf tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 435.155s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.463030] env[62813]: DEBUG oslo_concurrency.lockutils [None req-875059c6-358b-4d2d-abe2-a567a1aaa8cf tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.463030] env[62813]: DEBUG oslo_concurrency.lockutils [None req-875059c6-358b-4d2d-abe2-a567a1aaa8cf tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.463030] env[62813]: DEBUG oslo_concurrency.lockutils [None req-875059c6-358b-4d2d-abe2-a567a1aaa8cf tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.464828] env[62813]: INFO nova.compute.manager [None req-875059c6-358b-4d2d-abe2-a567a1aaa8cf tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Terminating instance [ 1866.466725] env[62813]: DEBUG nova.compute.manager [None req-875059c6-358b-4d2d-abe2-a567a1aaa8cf tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1866.466930] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-875059c6-358b-4d2d-abe2-a567a1aaa8cf tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1866.468231] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34f4550a-5ebe-490b-a72b-5dae670f1f34 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.473342] env[62813]: DEBUG nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1866.481177] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97297d1e-68e6-4db3-8f60-587f1ee8565b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.511375] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-875059c6-358b-4d2d-abe2-a567a1aaa8cf tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf could not be found. [ 1866.511617] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-875059c6-358b-4d2d-abe2-a567a1aaa8cf tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1866.511801] env[62813]: INFO nova.compute.manager [None req-875059c6-358b-4d2d-abe2-a567a1aaa8cf tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1866.512082] env[62813]: DEBUG oslo.service.loopingcall [None req-875059c6-358b-4d2d-abe2-a567a1aaa8cf tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1866.512725] env[62813]: DEBUG nova.compute.manager [-] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1866.512833] env[62813]: DEBUG nova.network.neutron [-] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1866.529657] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.529956] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.531913] env[62813]: INFO nova.compute.claims [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1866.542488] env[62813]: DEBUG nova.network.neutron [-] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.554496] env[62813]: INFO nova.compute.manager [-] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] Took 0.04 seconds to deallocate network for instance. [ 1866.671931] env[62813]: DEBUG oslo_concurrency.lockutils [None req-875059c6-358b-4d2d-abe2-a567a1aaa8cf tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.210s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.672923] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 118.086s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.673085] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 07c9c697-f11b-410e-b8d7-edf7cc7f0fcf] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1866.673252] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "07c9c697-f11b-410e-b8d7-edf7cc7f0fcf" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.779138] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea24e7f-dec3-493d-a480-fd42610f6474 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.786741] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876d29c8-e1a5-4a64-8088-da058cb380e1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.817352] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3b3c87-41a8-40bd-a070-bdab1c87e390 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.825204] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374b7b2b-f75a-40cb-90c6-5a92914dd291 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.839118] env[62813]: DEBUG nova.compute.provider_tree [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1866.849690] env[62813]: DEBUG nova.scheduler.client.report [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1866.868597] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.339s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.869144] env[62813]: DEBUG nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Start building networks asynchronously for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1866.916127] env[62813]: DEBUG nova.compute.utils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1866.917107] env[62813]: DEBUG nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1866.917359] env[62813]: DEBUG nova.network.neutron [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1866.927719] env[62813]: DEBUG nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1866.990250] env[62813]: DEBUG nova.policy [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f50715357b6e4ec89f1107a2008ca866', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc980771eb5c4e7287018905c618d7f1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1866.998346] env[62813]: DEBUG nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1867.025089] env[62813]: DEBUG nova.virt.hardware [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1867.025315] env[62813]: DEBUG nova.virt.hardware [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1867.025479] env[62813]: DEBUG nova.virt.hardware [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1867.025664] env[62813]: DEBUG nova.virt.hardware [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1867.025812] env[62813]: DEBUG nova.virt.hardware [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1867.025992] env[62813]: DEBUG nova.virt.hardware [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1867.026238] env[62813]: DEBUG nova.virt.hardware [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1867.026405] env[62813]: DEBUG nova.virt.hardware [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1867.026575] env[62813]: DEBUG 
nova.virt.hardware [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1867.026738] env[62813]: DEBUG nova.virt.hardware [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1867.026959] env[62813]: DEBUG nova.virt.hardware [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1867.027810] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd82f523-b2aa-45ea-b796-fbc45f04dc9f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.037959] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe188449-1d37-4393-bdfe-6cc2ea39c3aa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.341333] env[62813]: DEBUG nova.network.neutron [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Successfully created port: ade07c33-2039-47fd-9262-2acff0ffc638 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1868.164125] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1868.164509] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1868.164509] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1868.189159] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1868.189401] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1868.189521] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1868.189674] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1868.189801] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1868.189921] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1868.190056] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1868.190182] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1868.190301] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1868.190439] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1868.190576] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1868.292975] env[62813]: DEBUG nova.network.neutron [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Successfully updated port: ade07c33-2039-47fd-9262-2acff0ffc638 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1868.305424] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Acquiring lock "refresh_cache-7f344eb3-b1a2-454f-a647-2d9ec7da915f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.305902] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Acquired lock "refresh_cache-7f344eb3-b1a2-454f-a647-2d9ec7da915f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.306296] env[62813]: DEBUG nova.network.neutron [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1868.352497] env[62813]: DEBUG nova.network.neutron [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1868.364215] env[62813]: DEBUG nova.compute.manager [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Received event network-vif-plugged-ade07c33-2039-47fd-9262-2acff0ffc638 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1868.364215] env[62813]: DEBUG oslo_concurrency.lockutils [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] Acquiring lock "7f344eb3-b1a2-454f-a647-2d9ec7da915f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.364215] env[62813]: DEBUG oslo_concurrency.lockutils [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] Lock "7f344eb3-b1a2-454f-a647-2d9ec7da915f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.364215] env[62813]: DEBUG oslo_concurrency.lockutils [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] Lock "7f344eb3-b1a2-454f-a647-2d9ec7da915f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.364588] env[62813]: DEBUG nova.compute.manager [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] No waiting events found dispatching network-vif-plugged-ade07c33-2039-47fd-9262-2acff0ffc638 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1868.364588] env[62813]: WARNING nova.compute.manager [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Received unexpected event network-vif-plugged-ade07c33-2039-47fd-9262-2acff0ffc638 for instance with vm_state building and task_state spawning. [ 1868.364759] env[62813]: DEBUG nova.compute.manager [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Received event network-changed-ade07c33-2039-47fd-9262-2acff0ffc638 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1868.364917] env[62813]: DEBUG nova.compute.manager [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Refreshing instance network info cache due to event network-changed-ade07c33-2039-47fd-9262-2acff0ffc638. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1868.365130] env[62813]: DEBUG oslo_concurrency.lockutils [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] Acquiring lock "refresh_cache-7f344eb3-b1a2-454f-a647-2d9ec7da915f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.565547] env[62813]: DEBUG nova.network.neutron [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Updating instance_info_cache with network_info: [{"id": "ade07c33-2039-47fd-9262-2acff0ffc638", "address": "fa:16:3e:9e:9e:ca", "network": {"id": "468350e0-3969-44c2-bd1c-3ddd8da2be66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1141856924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc980771eb5c4e7287018905c618d7f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapade07c33-20", "ovs_interfaceid": "ade07c33-2039-47fd-9262-2acff0ffc638", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.582544] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Releasing lock "refresh_cache-7f344eb3-b1a2-454f-a647-2d9ec7da915f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.582864] env[62813]: DEBUG nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Instance network_info: |[{"id": "ade07c33-2039-47fd-9262-2acff0ffc638", "address": "fa:16:3e:9e:9e:ca", "network": {"id": "468350e0-3969-44c2-bd1c-3ddd8da2be66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1141856924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc980771eb5c4e7287018905c618d7f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapade07c33-20", "ovs_interfaceid": "ade07c33-2039-47fd-9262-2acff0ffc638", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1868.583211] env[62813]: DEBUG oslo_concurrency.lockutils [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] Acquired lock "refresh_cache-7f344eb3-b1a2-454f-a647-2d9ec7da915f" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.583436] env[62813]: DEBUG nova.network.neutron [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Refreshing network info cache for port ade07c33-2039-47fd-9262-2acff0ffc638 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1868.584616] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:9e:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ade07c33-2039-47fd-9262-2acff0ffc638', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1868.592261] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Creating folder: Project (dc980771eb5c4e7287018905c618d7f1). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1868.595643] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-582d0677-baaf-4fad-a6fc-d3bc205ee425 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.608827] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Created folder: Project (dc980771eb5c4e7287018905c618d7f1) in parent group-v840812. [ 1868.608827] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Creating folder: Instances. Parent ref: group-v840917. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1868.609112] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1327700-b9ab-4394-907c-6403f7cc89b8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.618843] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Created folder: Instances in parent group-v840917. 
[ 1868.619545] env[62813]: DEBUG oslo.service.loopingcall [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1868.619545] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1868.619545] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03cec047-ecad-45c1-9ce5-5999b2903bf0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.643104] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1868.643104] env[62813]: value = "task-4267758" [ 1868.643104] env[62813]: _type = "Task" [ 1868.643104] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.653713] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267758, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.965069] env[62813]: DEBUG nova.network.neutron [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Updated VIF entry in instance network info cache for port ade07c33-2039-47fd-9262-2acff0ffc638. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1868.965069] env[62813]: DEBUG nova.network.neutron [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Updating instance_info_cache with network_info: [{"id": "ade07c33-2039-47fd-9262-2acff0ffc638", "address": "fa:16:3e:9e:9e:ca", "network": {"id": "468350e0-3969-44c2-bd1c-3ddd8da2be66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1141856924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc980771eb5c4e7287018905c618d7f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapade07c33-20", "ovs_interfaceid": "ade07c33-2039-47fd-9262-2acff0ffc638", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.975729] env[62813]: DEBUG oslo_concurrency.lockutils [req-02ecf0fe-bef6-47bd-aa7f-dacc40c35e33 req-2f3f8939-77c3-47ef-913a-49935abb5ddd service nova] Releasing lock "refresh_cache-7f344eb3-b1a2-454f-a647-2d9ec7da915f" {{(pid=62813) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.154024] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267758, 'name': CreateVM_Task, 'duration_secs': 0.304659} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.154222] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1869.161417] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.161594] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.161960] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1869.162239] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea19ba0e-dfd8-4955-9cce-24671b607ee7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.167321] env[62813]: DEBUG oslo_vmware.api [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Waiting for the task: (returnval){ [ 1869.167321] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52653ea6-e65e-a9d1-a498-87a98827da4f" [ 1869.167321] env[62813]: _type = "Task" [ 1869.167321] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.175776] env[62813]: DEBUG oslo_vmware.api [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52653ea6-e65e-a9d1-a498-87a98827da4f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.679834] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.680090] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1869.680306] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.164120] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1872.164221] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1873.158816] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1873.163522] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1873.175598] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.175900] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.176017] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.176184] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1873.177363] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34182450-3619-4f58-975f-cd57b5c881e6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.186396] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937481fc-fae9-4557-bb70-44ab744752a3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.200676] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02516388-d13e-4071-aaf2-c62398977664 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.207500] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33047ae-b008-4544-aa94-b50b3c4d18fe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.236222] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180767MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1873.236398] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.236559] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.313165] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance c9402929-e845-416b-91e5-39d08ab90a2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1873.313767] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d5f63ddc-e786-471d-a871-2ef878bd2455 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1873.313767] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 176d5151-358a-4b90-9aff-064aa9648618 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1873.313767] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 2c94570a-7bb0-4719-9982-0e7710470db1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1873.313767] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1873.313998] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0d095679-87c7-46f6-8869-42b0f22127e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1873.313998] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f7777e07-72df-4af1-8f22-ccb71db0e06a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1873.314092] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 63766a48-0d55-4261-9949-be3335ae8d0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1873.314214] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1873.314342] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7f344eb3-b1a2-454f-a647-2d9ec7da915f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1873.329076] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance dcc68892-3e75-4da9-975a-5b41c69205f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1873.340693] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0dba8b6e-7927-432c-bd13-f5ce58f0c991 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1873.352146] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 769103f0-9ebd-4a7a-825f-bf7456cb6eb9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1873.352398] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1873.352579] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1873.525712] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85ecafa-c972-4841-86fb-8b31a60f4f57 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.533364] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34e25c1-d84e-409c-8735-7413931f888b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.564689] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab22a927-9984-4418-87a1-c2e5d887897a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.572147] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b62616-d339-47f5-9808-32ee25f83d14 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.585113] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 
49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1873.593826] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1873.608322] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1873.608516] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.372s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.604548] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1877.163635] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.494106] env[62813]: WARNING oslo_vmware.rw_handles [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1914.494106] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1914.494106] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1914.494106] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1914.494106] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1914.494106] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1914.494106] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1914.494106] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1914.494106] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1914.494106] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1914.494106] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1914.494106] 
env[62813]: ERROR oslo_vmware.rw_handles [ 1914.494106] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/7f43a86d-08fa-442b-89ad-8bbb5de82003/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1914.495064] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1914.496311] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Copying Virtual Disk [datastore2] vmware_temp/7f43a86d-08fa-442b-89ad-8bbb5de82003/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/7f43a86d-08fa-442b-89ad-8bbb5de82003/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1914.496440] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-130c4ac0-6ea1-425a-96d8-391f9698e34f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.505476] env[62813]: DEBUG oslo_vmware.api [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Waiting for the task: (returnval){ [ 1914.505476] env[62813]: value = "task-4267759" [ 1914.505476] env[62813]: _type = "Task" [ 1914.505476] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.513628] env[62813]: DEBUG oslo_vmware.api [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Task: {'id': task-4267759, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.017051] env[62813]: DEBUG oslo_vmware.exceptions [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1915.017370] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.018042] env[62813]: ERROR nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1915.018042] env[62813]: Faults: ['InvalidArgument'] [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Traceback (most recent call last): [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] yield resources [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] self.driver.spawn(context, instance, image_meta, [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] self._fetch_image_if_missing(context, vi) [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] image_cache(vi, tmp_image_ds_loc) [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] vm_util.copy_virtual_disk( [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] session._wait_for_task(vmdk_copy_task) [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] 
File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] return self.wait_for_task(task_ref) [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] return evt.wait() [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] result = hub.switch() [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] return self.greenlet.switch() [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] self.f(*self.args, **self.kw) [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] raise exceptions.translate_fault(task_info.error) [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Faults: ['InvalidArgument'] [ 1915.018042] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] [ 1915.019052] env[62813]: INFO nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Terminating instance [ 1915.020050] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.020612] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1915.020612] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-5a794851-fdcb-4709-944b-0f75975a7c4a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.022844] env[62813]: DEBUG nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1915.023062] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1915.023801] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5036c36d-c380-428b-8872-1e9912246b4e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.030943] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1915.031207] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9290fdee-fd4f-445c-84eb-371ae08d2514 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.033529] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1915.033707] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1915.034773] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebbbfa23-b254-44cf-93b4-b5eae838e35e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.039921] env[62813]: DEBUG oslo_vmware.api [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Waiting for the task: (returnval){ [ 1915.039921] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52001da4-81a8-a490-73f5-43d9251259e8" [ 1915.039921] env[62813]: _type = "Task" [ 1915.039921] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.047983] env[62813]: DEBUG oslo_vmware.api [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52001da4-81a8-a490-73f5-43d9251259e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.111892] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1915.112137] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1915.112327] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Deleting the datastore file [datastore2] c9402929-e845-416b-91e5-39d08ab90a2e {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1915.112649] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30014ee0-ffe9-4d2a-bb19-b5992055d3c9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.119636] env[62813]: DEBUG oslo_vmware.api [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Waiting for the task: (returnval){ [ 1915.119636] env[62813]: value = "task-4267761" [ 1915.119636] env[62813]: _type = "Task" [ 1915.119636] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.127960] env[62813]: DEBUG oslo_vmware.api [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Task: {'id': task-4267761, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.550159] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1915.550510] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Creating directory with path [datastore2] vmware_temp/fd952c88-2202-42d8-8f94-85f4d3dc92c6/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1915.550670] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6c667b0-2d2f-4074-bf5e-762374e440d5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.563375] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Created directory with path [datastore2] vmware_temp/fd952c88-2202-42d8-8f94-85f4d3dc92c6/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1915.563589] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Fetch image to [datastore2] vmware_temp/fd952c88-2202-42d8-8f94-85f4d3dc92c6/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1915.563759] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/fd952c88-2202-42d8-8f94-85f4d3dc92c6/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1915.564549] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36a510b-c084-443f-9ea6-1eb83f4f8088 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.571664] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da7e869-c858-4582-b277-40d67fd2c604 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.580958] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144b89b5-cf97-4d45-a10c-3c68273decc9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.611299] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ba4733c2-a84e-4262-af7f-e482a9c7fd00 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.617670] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-eca104b5-0c85-4903-8ce1-4377cfdb43e9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.629105] env[62813]: DEBUG oslo_vmware.api [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Task: {'id': task-4267761, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070004} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.629361] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1915.629545] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1915.629722] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1915.629943] env[62813]: INFO nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Took 0.61 seconds to destroy the instance on the hypervisor. 
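The DeleteDatastoreFile_Task sequence above shows the usual oslo.vmware pattern: the API call returns a task reference immediately and wait_for_task/_poll_task then poll it until it reports success or raises the translated fault. A minimal sketch of that polling loop, assuming a caller-supplied get_task_info callable and a generic TaskFailed exception (an illustration of the pattern seen in these records, not the oslo.vmware implementation):

    import time

    class TaskFailed(Exception):
        """Stand-in for the translated fault raised when a task errors out."""

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll until the task leaves its queued/running states.
        while True:
            info = get_task_info()            # e.g. {'state': 'running', 'progress': 0}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # Error path: the fault is translated and raised, which is what
                # surfaces later in this log as a VimFaultException.
                raise TaskFailed(info.get('error', 'task failed'))
            time.sleep(poll_interval)         # still running: log progress and retry

The repeated DEBUG "progress is 0%" lines correspond to the retry branch of such a loop.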
[ 1915.632202] env[62813]: DEBUG nova.compute.claims [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1915.632380] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.632601] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.643571] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1915.827732] env[62813]: DEBUG oslo_vmware.rw_handles [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fd952c88-2202-42d8-8f94-85f4d3dc92c6/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1915.886766] env[62813]: DEBUG oslo_vmware.rw_handles [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1915.886960] env[62813]: DEBUG oslo_vmware.rw_handles [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fd952c88-2202-42d8-8f94-85f4d3dc92c6/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1915.909289] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5982a302-596d-4f43-803e-a7622875134b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.917741] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1fe204a-9641-4a8c-970c-77fe9ae382ce {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.948957] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7435b5-5dad-40b3-96f5-b2d2897cb6ef {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.957027] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ecc8ee5-40db-4944-81ad-a9c7cdafb0ca {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.970727] env[62813]: DEBUG nova.compute.provider_tree [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1915.981415] env[62813]: DEBUG nova.scheduler.client.report [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1915.996174] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.363s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.996714] env[62813]: ERROR nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1915.996714] env[62813]: Faults: ['InvalidArgument'] [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Traceback (most recent call last): [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File 
"/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] self.driver.spawn(context, instance, image_meta, [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] self._fetch_image_if_missing(context, vi) [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] image_cache(vi, tmp_image_ds_loc) [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] vm_util.copy_virtual_disk( [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] session._wait_for_task(vmdk_copy_task) [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] return self.wait_for_task(task_ref) [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] return evt.wait() [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] result = hub.switch() [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] return self.greenlet.switch() [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] self.f(*self.args, **self.kw) [ 1915.996714] 
env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] raise exceptions.translate_fault(task_info.error) [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Faults: ['InvalidArgument'] [ 1915.996714] env[62813]: ERROR nova.compute.manager [instance: c9402929-e845-416b-91e5-39d08ab90a2e] [ 1915.997752] env[62813]: DEBUG nova.compute.utils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1915.999314] env[62813]: DEBUG nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Build of instance c9402929-e845-416b-91e5-39d08ab90a2e was re-scheduled: A specified parameter was not correct: fileType [ 1915.999314] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1915.999699] env[62813]: DEBUG nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1915.999912] env[62813]: DEBUG nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1916.000137] env[62813]: DEBUG nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1916.000371] env[62813]: DEBUG nova.network.neutron [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1916.463733] env[62813]: DEBUG nova.network.neutron [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.476473] env[62813]: INFO nova.compute.manager [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Took 0.48 seconds to deallocate network for instance. [ 1916.584338] env[62813]: INFO nova.scheduler.client.report [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Deleted allocations for instance c9402929-e845-416b-91e5-39d08ab90a2e [ 1916.608023] env[62813]: DEBUG oslo_concurrency.lockutils [None req-33c6995e-c55e-4b37-aeb1-208f718e10ab tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Lock "c9402929-e845-416b-91e5-39d08ab90a2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 629.559s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.609339] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62353100-903b-4eb6-913e-d3fe7487ca83 tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Lock "c9402929-e845-416b-91e5-39d08ab90a2e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 433.423s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.609492] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62353100-903b-4eb6-913e-d3fe7487ca83 tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Acquiring lock "c9402929-e845-416b-91e5-39d08ab90a2e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.609662] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62353100-903b-4eb6-913e-d3fe7487ca83 tempest-ServersNegativeTestMultiTenantJSON-314877174 
tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Lock "c9402929-e845-416b-91e5-39d08ab90a2e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.609823] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62353100-903b-4eb6-913e-d3fe7487ca83 tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Lock "c9402929-e845-416b-91e5-39d08ab90a2e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.611937] env[62813]: INFO nova.compute.manager [None req-62353100-903b-4eb6-913e-d3fe7487ca83 tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Terminating instance [ 1916.613668] env[62813]: DEBUG nova.compute.manager [None req-62353100-903b-4eb6-913e-d3fe7487ca83 tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1916.613864] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62353100-903b-4eb6-913e-d3fe7487ca83 tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1916.614375] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b226b5c-6e56-4028-b468-6f46cc4737d2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.623979] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d472c90c-f103-4ffe-a2af-d49c21af8497 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.639117] env[62813]: DEBUG nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1916.655573] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-62353100-903b-4eb6-913e-d3fe7487ca83 tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c9402929-e845-416b-91e5-39d08ab90a2e could not be found. 
[ 1916.655727] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-62353100-903b-4eb6-913e-d3fe7487ca83 tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1916.655842] env[62813]: INFO nova.compute.manager [None req-62353100-903b-4eb6-913e-d3fe7487ca83 tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1916.656108] env[62813]: DEBUG oslo.service.loopingcall [None req-62353100-903b-4eb6-913e-d3fe7487ca83 tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1916.656362] env[62813]: DEBUG nova.compute.manager [-] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1916.656467] env[62813]: DEBUG nova.network.neutron [-] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1916.685394] env[62813]: DEBUG nova.network.neutron [-] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.695691] env[62813]: INFO nova.compute.manager [-] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] Took 0.04 seconds to deallocate network for instance. 
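The second terminate of c9402929-e845-416b-91e5-39d08ab90a2e above illustrates a best-effort destroy: the backend VM is already gone, vmops logs InstanceNotFound as a warning, and the manager still proceeds to deallocate the network. A rough sketch of that tolerance pattern, with destroy_on_backend, deallocate_network and the local InstanceNotFound class as illustrative stand-ins for the driver call and Nova exception:

    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy_instance(instance_uuid, destroy_on_backend, deallocate_network):
        # A VM that no longer exists on the hypervisor is not an error here;
        # the surrounding cleanup (network, allocations) must still run.
        try:
            destroy_on_backend(instance_uuid)
        except InstanceNotFound:
            LOG.warning('Instance does not exist on backend: %s', instance_uuid)
        LOG.debug('Instance destroyed: %s', instance_uuid)
        deallocate_network(instance_uuid)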
[ 1916.752719] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.752719] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.754027] env[62813]: INFO nova.compute.claims [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1916.807692] env[62813]: DEBUG oslo_concurrency.lockutils [None req-62353100-903b-4eb6-913e-d3fe7487ca83 tempest-ServersNegativeTestMultiTenantJSON-314877174 tempest-ServersNegativeTestMultiTenantJSON-314877174-project-member] Lock "c9402929-e845-416b-91e5-39d08ab90a2e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.198s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.808879] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "c9402929-e845-416b-91e5-39d08ab90a2e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 168.222s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.809203] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: c9402929-e845-416b-91e5-39d08ab90a2e] During sync_power_state the instance has a pending task (deleting). Skip. 
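Nearly every step in this window is bracketed by the oslo.concurrency "Acquiring lock … / Lock … acquired / released" messages. The same serialization can be expressed directly with lockutils; the lock names below are taken from the records above, while the function bodies are illustrative only:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Serialized against other claims/aborts on this host, matching the
        # "compute_resources" acquire/release pairs logged by the resource tracker.
        print('claiming resources for %s' % instance_uuid)

    # Context-manager form, as used around the refresh_cache-<uuid> locks:
    with lockutils.lock('refresh_cache-example'):
        pass  # read or update the instance network info cache here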
[ 1916.809397] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "c9402929-e845-416b-91e5-39d08ab90a2e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.963922] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b21f18-f508-4a17-932b-b5f1d86c4416 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.972122] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da0ddfa-fe41-4fed-b2d6-1c1ed5fd7d23 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.002937] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500ff881-e5ff-4ec2-923d-231aed59e558 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.010582] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f42971c-9d25-4241-b1b6-149f37d580e2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.023873] env[62813]: DEBUG nova.compute.provider_tree [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1917.035122] env[62813]: DEBUG nova.scheduler.client.report [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1917.051172] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.299s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.051679] env[62813]: DEBUG nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Start building networks asynchronously for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1917.086272] env[62813]: DEBUG nova.compute.utils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1917.088417] env[62813]: DEBUG nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1917.088598] env[62813]: DEBUG nova.network.neutron [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1917.096252] env[62813]: DEBUG nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1917.156353] env[62813]: DEBUG nova.policy [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7e83a1e9938040319abff86403da1abd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed4f82f29464418095009edeaaabf851', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1917.166710] env[62813]: DEBUG nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1917.195496] env[62813]: DEBUG nova.virt.hardware [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1917.195772] env[62813]: DEBUG nova.virt.hardware [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1917.195934] env[62813]: DEBUG nova.virt.hardware [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1917.196140] env[62813]: DEBUG nova.virt.hardware [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1917.196290] env[62813]: DEBUG nova.virt.hardware [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1917.196470] env[62813]: DEBUG nova.virt.hardware [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1917.196702] env[62813]: DEBUG nova.virt.hardware [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1917.196866] env[62813]: DEBUG nova.virt.hardware [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1917.197046] env[62813]: DEBUG nova.virt.hardware [None 
req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1917.197217] env[62813]: DEBUG nova.virt.hardware [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1917.197395] env[62813]: DEBUG nova.virt.hardware [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1917.198403] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64dbafeb-d929-41be-895a-da1276d4d27c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.209146] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25d8def-c02b-4a83-bed1-bedf18f00163 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.771132] env[62813]: DEBUG nova.network.neutron [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Successfully created port: 45d5e964-16b8-401e-88a0-81f46bd84236 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1918.423946] env[62813]: DEBUG nova.network.neutron [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Successfully updated port: 45d5e964-16b8-401e-88a0-81f46bd84236 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1918.436232] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "refresh_cache-dcc68892-3e75-4da9-975a-5b41c69205f7" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.436392] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquired lock "refresh_cache-dcc68892-3e75-4da9-975a-5b41c69205f7" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.436546] env[62813]: DEBUG nova.network.neutron [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1918.513509] env[62813]: DEBUG nova.network.neutron [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 
tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1918.589045] env[62813]: DEBUG nova.compute.manager [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Received event network-vif-plugged-45d5e964-16b8-401e-88a0-81f46bd84236 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1918.589385] env[62813]: DEBUG oslo_concurrency.lockutils [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] Acquiring lock "dcc68892-3e75-4da9-975a-5b41c69205f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.589539] env[62813]: DEBUG oslo_concurrency.lockutils [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] Lock "dcc68892-3e75-4da9-975a-5b41c69205f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.589717] env[62813]: DEBUG oslo_concurrency.lockutils [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] Lock "dcc68892-3e75-4da9-975a-5b41c69205f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.589905] env[62813]: DEBUG nova.compute.manager [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] No waiting events found dispatching network-vif-plugged-45d5e964-16b8-401e-88a0-81f46bd84236 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1918.590560] env[62813]: WARNING nova.compute.manager [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Received unexpected event network-vif-plugged-45d5e964-16b8-401e-88a0-81f46bd84236 for instance with vm_state building and task_state spawning. [ 1918.590827] env[62813]: DEBUG nova.compute.manager [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Received event network-changed-45d5e964-16b8-401e-88a0-81f46bd84236 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1918.591095] env[62813]: DEBUG nova.compute.manager [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Refreshing instance network info cache due to event network-changed-45d5e964-16b8-401e-88a0-81f46bd84236. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1918.591341] env[62813]: DEBUG oslo_concurrency.lockutils [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] Acquiring lock "refresh_cache-dcc68892-3e75-4da9-975a-5b41c69205f7" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.766328] env[62813]: DEBUG nova.network.neutron [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Updating instance_info_cache with network_info: [{"id": "45d5e964-16b8-401e-88a0-81f46bd84236", "address": "fa:16:3e:93:f2:81", "network": {"id": "3634497e-4629-49a7-8257-310f15553ab0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-918616078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed4f82f29464418095009edeaaabf851", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45d5e964-16", "ovs_interfaceid": "45d5e964-16b8-401e-88a0-81f46bd84236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1918.779847] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Releasing lock "refresh_cache-dcc68892-3e75-4da9-975a-5b41c69205f7" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.780266] env[62813]: DEBUG nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Instance network_info: |[{"id": "45d5e964-16b8-401e-88a0-81f46bd84236", "address": "fa:16:3e:93:f2:81", "network": {"id": "3634497e-4629-49a7-8257-310f15553ab0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-918616078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed4f82f29464418095009edeaaabf851", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap45d5e964-16", "ovs_interfaceid": "45d5e964-16b8-401e-88a0-81f46bd84236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1918.780541] env[62813]: DEBUG oslo_concurrency.lockutils [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] Acquired lock "refresh_cache-dcc68892-3e75-4da9-975a-5b41c69205f7" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.780737] env[62813]: DEBUG nova.network.neutron [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Refreshing network info cache for port 45d5e964-16b8-401e-88a0-81f46bd84236 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1918.781863] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:f2:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b2049d7-f99e-425a-afdb-2c95ca88e483', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45d5e964-16b8-401e-88a0-81f46bd84236', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1918.789754] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Creating folder: Project (ed4f82f29464418095009edeaaabf851). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1918.790779] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-601dddd4-0227-41b0-a0ce-13010d15e4b3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.804569] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Created folder: Project (ed4f82f29464418095009edeaaabf851) in parent group-v840812. [ 1918.804789] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Creating folder: Instances. Parent ref: group-v840920. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1918.805049] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88fbf5c9-af53-4675-b8cd-e27fab82f827 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.817795] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Created folder: Instances in parent group-v840920. 
[ 1918.818088] env[62813]: DEBUG oslo.service.loopingcall [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1918.818288] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1918.818502] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6051d058-2791-48d5-a742-09430c0d703e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.839471] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1918.839471] env[62813]: value = "task-4267764" [ 1918.839471] env[62813]: _type = "Task" [ 1918.839471] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.847838] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267764, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.115137] env[62813]: DEBUG nova.network.neutron [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Updated VIF entry in instance network info cache for port 45d5e964-16b8-401e-88a0-81f46bd84236. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1919.115530] env[62813]: DEBUG nova.network.neutron [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Updating instance_info_cache with network_info: [{"id": "45d5e964-16b8-401e-88a0-81f46bd84236", "address": "fa:16:3e:93:f2:81", "network": {"id": "3634497e-4629-49a7-8257-310f15553ab0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-918616078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed4f82f29464418095009edeaaabf851", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45d5e964-16", "ovs_interfaceid": "45d5e964-16b8-401e-88a0-81f46bd84236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.127557] env[62813]: DEBUG oslo_concurrency.lockutils [req-38e636f4-372d-4359-84a0-64fe29c5fccb req-c2ace1f9-c893-4e0e-af47-262a58c3a3cf service nova] Releasing lock "refresh_cache-dcc68892-3e75-4da9-975a-5b41c69205f7" {{(pid=62813) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.350458] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267764, 'name': CreateVM_Task, 'duration_secs': 0.322079} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.350714] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1919.351367] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.351541] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.351886] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1919.352203] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd2e5d89-932e-46f6-b3f3-c019eb162d8c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.357100] env[62813]: DEBUG oslo_vmware.api [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Waiting for the task: (returnval){ [ 1919.357100] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52a6eea9-7284-ea7e-c4be-54ef7c2474fd" [ 1919.357100] env[62813]: _type = "Task" [ 1919.357100] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.365398] env[62813]: DEBUG oslo_vmware.api [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52a6eea9-7284-ea7e-c4be-54ef7c2474fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.868167] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.868543] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1919.868653] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1923.163945] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1923.164335] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1926.164218] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1926.164609] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1928.164645] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1928.165043] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1928.165043] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1928.187830] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1928.188028] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1928.188143] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1928.188274] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1928.188398] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1928.188520] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1928.188640] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1928.188759] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1928.188879] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1928.188995] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1928.189122] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1932.420862] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8f175380-b511-485d-b6f1-69d425a60ed4 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Acquiring lock "7f344eb3-b1a2-454f-a647-2d9ec7da915f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.164076] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1934.163614] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1934.163971] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1934.179130] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.179507] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.179507] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.180824] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1934.180918] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0cfa53-2248-439a-942a-366b0f9631ce {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.190440] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9627f982-c587-4d58-8ea1-4a46e5d28492 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.206367] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669565a2-a207-4fc8-842a-d007cc5313bb {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.213544] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37f161a-c91c-4f79-9b9c-d8e8b93bba11 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.244030] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180755MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1934.244030] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.244030] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.330366] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance d5f63ddc-e786-471d-a871-2ef878bd2455 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.330557] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 176d5151-358a-4b90-9aff-064aa9648618 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.330688] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 2c94570a-7bb0-4719-9982-0e7710470db1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.330812] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.330933] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0d095679-87c7-46f6-8869-42b0f22127e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.331093] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f7777e07-72df-4af1-8f22-ccb71db0e06a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.331229] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 63766a48-0d55-4261-9949-be3335ae8d0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.331348] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.331463] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7f344eb3-b1a2-454f-a647-2d9ec7da915f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.331577] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance dcc68892-3e75-4da9-975a-5b41c69205f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1934.346604] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0dba8b6e-7927-432c-bd13-f5ce58f0c991 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1934.358109] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 769103f0-9ebd-4a7a-825f-bf7456cb6eb9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1934.358372] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1934.358524] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1934.504868] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7af969e-e8d6-482a-97a1-29bab5e5a1fe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.513025] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63eb6fe1-7a0d-4562-98ac-365f5bc28063 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.544024] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb941770-5c5e-4de9-bee4-e7b25c09757b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.552288] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829de305-e78e-4078-a90d-5dff59fa732a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.361959] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1935.369888] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1935.386981] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1935.386981] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.143s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.383076] env[62813]: DEBUG oslo_service.periodic_task [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1939.163360] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1965.917078] env[62813]: WARNING oslo_vmware.rw_handles [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1965.917078] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1965.917078] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1965.917078] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1965.917078] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1965.917078] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 1965.917078] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1965.917078] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1965.917078] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1965.917078] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1965.917078] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1965.917078] env[62813]: ERROR oslo_vmware.rw_handles [ 1965.917897] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/fd952c88-2202-42d8-8f94-85f4d3dc92c6/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1965.919795] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1965.920061] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Copying Virtual Disk [datastore2] vmware_temp/fd952c88-2202-42d8-8f94-85f4d3dc92c6/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/fd952c88-2202-42d8-8f94-85f4d3dc92c6/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} 
[ 1965.920352] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6cbd5169-a69b-4245-8bc4-5e6287d69865 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.928797] env[62813]: DEBUG oslo_vmware.api [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Waiting for the task: (returnval){ [ 1965.928797] env[62813]: value = "task-4267765" [ 1965.928797] env[62813]: _type = "Task" [ 1965.928797] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.936978] env[62813]: DEBUG oslo_vmware.api [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Task: {'id': task-4267765, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.440262] env[62813]: DEBUG oslo_vmware.exceptions [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1966.440262] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.440262] env[62813]: ERROR nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1966.440262] env[62813]: Faults: ['InvalidArgument'] [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Traceback (most recent call last): [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] yield resources [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] self.driver.spawn(context, instance, image_meta, [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1966.440262] env[62813]: ERROR 
nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] self._fetch_image_if_missing(context, vi) [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] image_cache(vi, tmp_image_ds_loc) [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] vm_util.copy_virtual_disk( [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] session._wait_for_task(vmdk_copy_task) [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] return self.wait_for_task(task_ref) [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] return evt.wait() [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] result = hub.switch() [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] return self.greenlet.switch() [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] self.f(*self.args, **self.kw) [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] raise exceptions.translate_fault(task_info.error) [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: 
d5f63ddc-e786-471d-a871-2ef878bd2455] Faults: ['InvalidArgument'] [ 1966.440262] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] [ 1966.441764] env[62813]: INFO nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Terminating instance [ 1966.442102] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1966.442321] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1966.442559] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ae3c2ea-9510-4f5e-861f-df1bb97907f6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.444751] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Start destroying the instance on the hypervisor. 
{{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1966.444952] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1966.445674] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb48e14-082d-4133-a9da-d863e4787ab1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.452506] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1966.452756] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2bdce278-0088-495b-b3a7-324186a379e7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.454994] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1966.455179] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1966.456101] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b7532a2-3aa7-4bdc-93b3-316c8951fb69 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.460915] env[62813]: DEBUG oslo_vmware.api [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for the task: (returnval){ [ 1966.460915] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5211c081-e4cf-9b87-c8b7-04a0ba8ca79b" [ 1966.460915] env[62813]: _type = "Task" [ 1966.460915] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.468447] env[62813]: DEBUG oslo_vmware.api [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5211c081-e4cf-9b87-c8b7-04a0ba8ca79b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.522548] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1966.522868] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1966.523082] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Deleting the datastore file [datastore2] d5f63ddc-e786-471d-a871-2ef878bd2455 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1966.523366] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e8bc321-df1a-46de-97c6-2ed971bfbfda {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.530106] env[62813]: DEBUG oslo_vmware.api [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Waiting for the task: (returnval){ [ 1966.530106] env[62813]: value = "task-4267767" [ 1966.530106] env[62813]: _type = "Task" [ 1966.530106] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.537865] env[62813]: DEBUG oslo_vmware.api [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Task: {'id': task-4267767, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.975139] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1966.975487] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Creating directory with path [datastore2] vmware_temp/01feb3b3-7712-4727-b168-ed2ce8ab54a8/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1966.975866] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66b06cfc-a0a4-4e41-97d8-2b03dde48ace {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.989176] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Created directory with path [datastore2] vmware_temp/01feb3b3-7712-4727-b168-ed2ce8ab54a8/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1966.989392] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Fetch image to [datastore2] vmware_temp/01feb3b3-7712-4727-b168-ed2ce8ab54a8/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1966.989568] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/01feb3b3-7712-4727-b168-ed2ce8ab54a8/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1966.990354] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6107ca4f-54fc-4da3-a3fc-1e36a6683ab2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.997397] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93999fd6-612d-46c8-826e-0b9050e91297 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.006936] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f26a05-179b-4a33-9ccb-7c17025cc65f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.042546] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c152fc12-6d45-4c7c-864d-17f73692c560 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.050428] env[62813]: DEBUG oslo_vmware.api [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Task: {'id': task-4267767, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080181} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.051929] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1967.052136] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1967.052313] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1967.052492] env[62813]: INFO nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1967.054609] env[62813]: DEBUG nova.compute.claims [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1967.054824] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1967.055062] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1967.057598] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6e5a9d15-24f3-4b15-9d26-eae02145eb36 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.082261] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1967.146313] env[62813]: DEBUG oslo_vmware.rw_handles [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/01feb3b3-7712-4727-b168-ed2ce8ab54a8/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1967.205862] env[62813]: DEBUG oslo_vmware.rw_handles [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1967.206104] env[62813]: DEBUG oslo_vmware.rw_handles [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/01feb3b3-7712-4727-b168-ed2ce8ab54a8/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1967.321100] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99952bbc-2ebe-4660-b3db-5b99f08b77b6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.329042] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a00144-1faf-4b68-a31a-25c1517c2547 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.359871] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292558d1-726f-440f-b58c-8477fc792488 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.367997] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4e898f-2710-483f-88ef-339c6fcec5fe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.381433] env[62813]: DEBUG nova.compute.provider_tree [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1967.390422] env[62813]: DEBUG nova.scheduler.client.report [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1967.406355] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.351s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.406907] env[62813]: ERROR nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1967.406907] env[62813]: Faults: ['InvalidArgument'] [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Traceback (most recent call last): [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] self.driver.spawn(context, instance, image_meta, [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] self._fetch_image_if_missing(context, vi) [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] image_cache(vi, tmp_image_ds_loc) [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] vm_util.copy_virtual_disk( [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] session._wait_for_task(vmdk_copy_task) [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] return self.wait_for_task(task_ref) [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] return evt.wait() [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] result = hub.switch() [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] return self.greenlet.switch() [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] self.f(*self.args, **self.kw) [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: 
d5f63ddc-e786-471d-a871-2ef878bd2455] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] raise exceptions.translate_fault(task_info.error) [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Faults: ['InvalidArgument'] [ 1967.406907] env[62813]: ERROR nova.compute.manager [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] [ 1967.407787] env[62813]: DEBUG nova.compute.utils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1967.409074] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Build of instance d5f63ddc-e786-471d-a871-2ef878bd2455 was re-scheduled: A specified parameter was not correct: fileType [ 1967.409074] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1967.409450] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1967.409623] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1967.409792] env[62813]: DEBUG nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1967.409971] env[62813]: DEBUG nova.network.neutron [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1967.835914] env[62813]: DEBUG nova.network.neutron [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.852027] env[62813]: INFO nova.compute.manager [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Took 0.44 seconds to deallocate network for instance. [ 1967.968561] env[62813]: INFO nova.scheduler.client.report [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Deleted allocations for instance d5f63ddc-e786-471d-a871-2ef878bd2455 [ 1967.995861] env[62813]: DEBUG oslo_concurrency.lockutils [None req-919f8a59-4b3e-40fa-86f1-69edbae06f83 tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "d5f63ddc-e786-471d-a871-2ef878bd2455" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 630.188s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.997168] env[62813]: DEBUG oslo_concurrency.lockutils [None req-472cff67-c904-47e3-a84f-9cf0bf01a46f tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "d5f63ddc-e786-471d-a871-2ef878bd2455" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 434.017s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1967.997397] env[62813]: DEBUG oslo_concurrency.lockutils [None req-472cff67-c904-47e3-a84f-9cf0bf01a46f tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Acquiring lock "d5f63ddc-e786-471d-a871-2ef878bd2455-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1967.997640] env[62813]: DEBUG oslo_concurrency.lockutils [None req-472cff67-c904-47e3-a84f-9cf0bf01a46f tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "d5f63ddc-e786-471d-a871-2ef878bd2455-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1967.997814] env[62813]: DEBUG oslo_concurrency.lockutils [None req-472cff67-c904-47e3-a84f-9cf0bf01a46f tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "d5f63ddc-e786-471d-a871-2ef878bd2455-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.000764] env[62813]: INFO nova.compute.manager [None req-472cff67-c904-47e3-a84f-9cf0bf01a46f tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Terminating instance [ 1968.002675] env[62813]: DEBUG nova.compute.manager [None req-472cff67-c904-47e3-a84f-9cf0bf01a46f tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1968.002893] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-472cff67-c904-47e3-a84f-9cf0bf01a46f tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1968.003167] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f2bf462-f5e4-434e-bb52-97bba7e8f2d0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.013554] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9832c276-8bc1-4706-a145-c264e3de5c43 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.023861] env[62813]: DEBUG nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1968.045970] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-472cff67-c904-47e3-a84f-9cf0bf01a46f tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d5f63ddc-e786-471d-a871-2ef878bd2455 could not be found. 
[ 1968.046204] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-472cff67-c904-47e3-a84f-9cf0bf01a46f tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1968.046382] env[62813]: INFO nova.compute.manager [None req-472cff67-c904-47e3-a84f-9cf0bf01a46f tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1968.046633] env[62813]: DEBUG oslo.service.loopingcall [None req-472cff67-c904-47e3-a84f-9cf0bf01a46f tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1968.046860] env[62813]: DEBUG nova.compute.manager [-] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1968.046955] env[62813]: DEBUG nova.network.neutron [-] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1968.072498] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.072498] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.073790] env[62813]: INFO nova.compute.claims [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1968.076619] env[62813]: DEBUG nova.network.neutron [-] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.085035] env[62813]: INFO nova.compute.manager [-] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] Took 0.04 seconds to deallocate network for instance. 
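The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entry above comes from oslo.service's looping-call machinery, which Nova uses to keep retrying network deallocation until it succeeds. The sketch below shows the generic retry-until-done shape with FixedIntervalLoopingCall and LoopingCallDone; the helper name, the fake failure count, and the 0.1s interval are illustrative assumptions, not the class or values Nova necessarily uses here.

    from oslo_service import loopingcall


    def _deallocate_with_retries(state):
        """Illustrative retry body: pretend the first two attempts fail."""
        state["attempts"] += 1
        if state["attempts"] < 3:
            # Returning normally means "call me again on the next interval".
            return
        # Raising LoopingCallDone stops the loop; retvalue is handed to wait().
        raise loopingcall.LoopingCallDone(retvalue=state["attempts"])


    if __name__ == "__main__":
        state = {"attempts": 0}
        timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries, state)
        attempts = timer.start(interval=0.1).wait()
        print("deallocation succeeded after %d attempts" % attempts)

The start()/wait() pairing is the shape the log's loopingcall.py reference points at: one "Waiting for function ... to return" line, then the deallocation messages as the wrapped function runs.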
[ 1968.204762] env[62813]: DEBUG oslo_concurrency.lockutils [None req-472cff67-c904-47e3-a84f-9cf0bf01a46f tempest-ListServersNegativeTestJSON-1997692357 tempest-ListServersNegativeTestJSON-1997692357-project-member] Lock "d5f63ddc-e786-471d-a871-2ef878bd2455" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.208s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.205603] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "d5f63ddc-e786-471d-a871-2ef878bd2455" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 219.619s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.205790] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: d5f63ddc-e786-471d-a871-2ef878bd2455] During sync_power_state the instance has a pending task (deleting). Skip. [ 1968.205960] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "d5f63ddc-e786-471d-a871-2ef878bd2455" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.300297] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c06cff8-523e-420c-8d15-c545d7fba929 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.308357] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8a79c5-995e-495c-b26c-421767081add {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.337549] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d05e1bc-49a4-4149-9db2-0d23b6623857 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.345354] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a369d6e-9b5b-4fa3-a28b-27ce7142769b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.359356] env[62813]: DEBUG nova.compute.provider_tree [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1968.368543] env[62813]: DEBUG nova.scheduler.client.report [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 
'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1968.383304] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.311s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.383809] env[62813]: DEBUG nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1968.419117] env[62813]: DEBUG nova.compute.utils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1968.420705] env[62813]: DEBUG nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1968.420873] env[62813]: DEBUG nova.network.neutron [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1968.430671] env[62813]: DEBUG nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1968.477196] env[62813]: DEBUG nova.policy [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05e5639b4c294098ac97eae52872b91c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dab4ddba893f4b47886bb54e9083c414', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 1968.494703] env[62813]: DEBUG nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1968.519641] env[62813]: DEBUG nova.virt.hardware [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1968.519930] env[62813]: DEBUG nova.virt.hardware [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1968.520151] env[62813]: DEBUG nova.virt.hardware [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1968.520415] env[62813]: DEBUG nova.virt.hardware [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1968.520617] env[62813]: DEBUG nova.virt.hardware [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1968.520873] env[62813]: DEBUG nova.virt.hardware [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1968.521077] env[62813]: DEBUG nova.virt.hardware [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1968.521294] env[62813]: DEBUG nova.virt.hardware [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1968.521510] env[62813]: DEBUG nova.virt.hardware [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 
tempest-ServersTestJSON-661015703-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1968.521711] env[62813]: DEBUG nova.virt.hardware [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1968.521891] env[62813]: DEBUG nova.virt.hardware [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1968.522770] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8834e14-145a-4aed-a09c-d2adf36673bc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.531075] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0079519d-1049-4281-8171-579fef981db5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.833966] env[62813]: DEBUG nova.network.neutron [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Successfully created port: 942a3b53-67d7-49a8-a79a-2b953aa01ba9 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1969.802749] env[62813]: DEBUG nova.network.neutron [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Successfully updated port: 942a3b53-67d7-49a8-a79a-2b953aa01ba9 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1969.813686] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "refresh_cache-0dba8b6e-7927-432c-bd13-f5ce58f0c991" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.813879] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired lock "refresh_cache-0dba8b6e-7927-432c-bd13-f5ce58f0c991" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1969.814105] env[62813]: DEBUG nova.network.neutron [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1969.858205] env[62813]: DEBUG nova.network.neutron [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1969.890963] env[62813]: DEBUG nova.compute.manager [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Received event network-vif-plugged-942a3b53-67d7-49a8-a79a-2b953aa01ba9 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1969.891217] env[62813]: DEBUG oslo_concurrency.lockutils [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] Acquiring lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.891474] env[62813]: DEBUG oslo_concurrency.lockutils [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] Lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.891626] env[62813]: DEBUG oslo_concurrency.lockutils [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] Lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.891801] env[62813]: DEBUG nova.compute.manager [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] No waiting events found dispatching network-vif-plugged-942a3b53-67d7-49a8-a79a-2b953aa01ba9 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1969.891953] env[62813]: WARNING nova.compute.manager [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Received unexpected event network-vif-plugged-942a3b53-67d7-49a8-a79a-2b953aa01ba9 for instance with vm_state building and task_state spawning. [ 1969.892266] env[62813]: DEBUG nova.compute.manager [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Received event network-changed-942a3b53-67d7-49a8-a79a-2b953aa01ba9 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1969.892441] env[62813]: DEBUG nova.compute.manager [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Refreshing instance network info cache due to event network-changed-942a3b53-67d7-49a8-a79a-2b953aa01ba9. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1969.892616] env[62813]: DEBUG oslo_concurrency.lockutils [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] Acquiring lock "refresh_cache-0dba8b6e-7927-432c-bd13-f5ce58f0c991" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1970.053133] env[62813]: DEBUG nova.network.neutron [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Updating instance_info_cache with network_info: [{"id": "942a3b53-67d7-49a8-a79a-2b953aa01ba9", "address": "fa:16:3e:d5:eb:b2", "network": {"id": "0d8c8a43-7fa2-405e-9c3d-8c25c7410832", "bridge": "br-int", "label": "tempest-ServersTestJSON-1384810997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab4ddba893f4b47886bb54e9083c414", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap942a3b53-67", "ovs_interfaceid": "942a3b53-67d7-49a8-a79a-2b953aa01ba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.067263] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Releasing lock "refresh_cache-0dba8b6e-7927-432c-bd13-f5ce58f0c991" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.067580] env[62813]: DEBUG nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Instance network_info: |[{"id": "942a3b53-67d7-49a8-a79a-2b953aa01ba9", "address": "fa:16:3e:d5:eb:b2", "network": {"id": "0d8c8a43-7fa2-405e-9c3d-8c25c7410832", "bridge": "br-int", "label": "tempest-ServersTestJSON-1384810997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab4ddba893f4b47886bb54e9083c414", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap942a3b53-67", 
"ovs_interfaceid": "942a3b53-67d7-49a8-a79a-2b953aa01ba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1970.067902] env[62813]: DEBUG oslo_concurrency.lockutils [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] Acquired lock "refresh_cache-0dba8b6e-7927-432c-bd13-f5ce58f0c991" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1970.068092] env[62813]: DEBUG nova.network.neutron [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Refreshing network info cache for port 942a3b53-67d7-49a8-a79a-2b953aa01ba9 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1970.070176] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:eb:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad4fcde7-8926-402a-a9b7-4878d2bc1cf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '942a3b53-67d7-49a8-a79a-2b953aa01ba9', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1970.080028] env[62813]: DEBUG oslo.service.loopingcall [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1970.081205] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1970.084440] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4ea6cf3-e925-4c81-97ad-71587ef6ef12 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.105849] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1970.105849] env[62813]: value = "task-4267768" [ 1970.105849] env[62813]: _type = "Task" [ 1970.105849] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.113594] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267768, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.445111] env[62813]: DEBUG nova.network.neutron [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Updated VIF entry in instance network info cache for port 942a3b53-67d7-49a8-a79a-2b953aa01ba9. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1970.445474] env[62813]: DEBUG nova.network.neutron [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Updating instance_info_cache with network_info: [{"id": "942a3b53-67d7-49a8-a79a-2b953aa01ba9", "address": "fa:16:3e:d5:eb:b2", "network": {"id": "0d8c8a43-7fa2-405e-9c3d-8c25c7410832", "bridge": "br-int", "label": "tempest-ServersTestJSON-1384810997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab4ddba893f4b47886bb54e9083c414", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap942a3b53-67", "ovs_interfaceid": "942a3b53-67d7-49a8-a79a-2b953aa01ba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.454915] env[62813]: DEBUG oslo_concurrency.lockutils [req-0825fab2-237b-4f51-853f-5bd955a993dd req-5afb8d21-a857-4ac5-8918-750caaaa4374 service nova] Releasing lock "refresh_cache-0dba8b6e-7927-432c-bd13-f5ce58f0c991" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.616012] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267768, 'name': CreateVM_Task, 'duration_secs': 0.467149} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.616263] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1970.616990] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1970.617149] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1970.617475] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1970.617727] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a3a6aba-1c5f-494f-911a-79688622afb0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.622520] env[62813]: DEBUG oslo_vmware.api [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for the task: (returnval){ [ 1970.622520] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]525017df-3cf2-cf3c-4042-7ddd1e1d2381" [ 1970.622520] env[62813]: _type = "Task" [ 1970.622520] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.630636] env[62813]: DEBUG oslo_vmware.api [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]525017df-3cf2-cf3c-4042-7ddd1e1d2381, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.133230] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1971.133668] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1971.133849] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1981.975493] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquiring lock "926a846d-f902-4ec3-898e-439f10b4ee68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.975799] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Lock "926a846d-f902-4ec3-898e-439f10b4ee68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.013698] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0d21daa-5196-4a2b-97c7-366ed1a3ac6f tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "dcc68892-3e75-4da9-975a-5b41c69205f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1984.164138] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1984.164412] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1987.164027] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1988.164868] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1990.164424] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1990.164858] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1990.164858] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1990.188020] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1990.188450] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1990.188450] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1990.188450] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1990.188586] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1990.188708] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1990.188861] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1990.188983] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1990.189114] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1990.189233] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1990.189352] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1993.163892] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1994.164743] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1995.159769] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1995.163616] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1995.177516] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.177838] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.177929] env[62813]: DEBUG oslo_concurrency.lockutils [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.178103] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1995.179325] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe979110-7e14-4507-9ca8-45bc1b030c82 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.188538] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60c9b54-9380-4009-9410-0ac99ae0c699 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.202492] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f05be3-94a4-4949-9101-e755e9a8b8a8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.209372] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a42791-2ddc-4fce-86e1-19e7aad06375 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.240417] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180770MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1995.240593] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.240786] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.320433] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 176d5151-358a-4b90-9aff-064aa9648618 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1995.320591] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 2c94570a-7bb0-4719-9982-0e7710470db1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1995.320771] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1995.320952] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0d095679-87c7-46f6-8869-42b0f22127e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1995.321096] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f7777e07-72df-4af1-8f22-ccb71db0e06a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1995.321222] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 63766a48-0d55-4261-9949-be3335ae8d0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1995.321342] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1995.321461] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7f344eb3-b1a2-454f-a647-2d9ec7da915f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1995.321579] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance dcc68892-3e75-4da9-975a-5b41c69205f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1995.321770] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0dba8b6e-7927-432c-bd13-f5ce58f0c991 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1995.333542] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 769103f0-9ebd-4a7a-825f-bf7456cb6eb9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1995.347791] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 926a846d-f902-4ec3-898e-439f10b4ee68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1995.347791] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1995.347791] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1995.519929] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34305c2b-c6de-474b-99f0-ab9cbbe6d34f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.528780] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b77ba66-bbcf-41ed-a86e-c80902a3910b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.557919] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0e08b6-d827-49d3-bcae-4a499315e5e1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.565993] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41bd57c2-55fe-4389-b54d-37d50d533220 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.579850] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1995.588201] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1995.603538] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1995.603794] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.363s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2000.601327] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2000.625629] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2012.461277] env[62813]: WARNING oslo_vmware.rw_handles [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2012.461277] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2012.461277] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2012.461277] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2012.461277] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2012.461277] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 2012.461277] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2012.461277] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2012.461277] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2012.461277] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2012.461277] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2012.461277] env[62813]: ERROR oslo_vmware.rw_handles [ 2012.461855] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/01feb3b3-7712-4727-b168-ed2ce8ab54a8/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2012.463687] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2012.463932] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Copying Virtual Disk [datastore2] vmware_temp/01feb3b3-7712-4727-b168-ed2ce8ab54a8/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/01feb3b3-7712-4727-b168-ed2ce8ab54a8/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2012.464241] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95531525-b504-44a8-86a4-675db773967e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.472150] env[62813]: DEBUG oslo_vmware.api [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for the task: (returnval){ [ 2012.472150] env[62813]: value = "task-4267769" [ 2012.472150] env[62813]: _type = "Task" [ 2012.472150] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.480473] env[62813]: DEBUG oslo_vmware.api [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': task-4267769, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.982804] env[62813]: DEBUG oslo_vmware.exceptions [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2012.983107] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2012.983759] env[62813]: ERROR nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2012.983759] env[62813]: Faults: ['InvalidArgument'] [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Traceback (most recent call last): [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] yield resources [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] self.driver.spawn(context, instance, image_meta, [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] self._fetch_image_if_missing(context, vi) [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] image_cache(vi, tmp_image_ds_loc) [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] vm_util.copy_virtual_disk( [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] session._wait_for_task(vmdk_copy_task) [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] return self.wait_for_task(task_ref) [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] return evt.wait() [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] result = hub.switch() [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] return self.greenlet.switch() [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] self.f(*self.args, **self.kw) [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] raise exceptions.translate_fault(task_info.error) [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Faults: ['InvalidArgument'] [ 2012.983759] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] [ 2012.984881] env[62813]: INFO nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Terminating instance [ 2012.986043] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.986043] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2012.986164] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f64e2675-4391-4e86-9110-f8e88bf761dd {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.988392] env[62813]: DEBUG nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2012.988587] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2012.989348] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c684915-9b66-4aed-a794-dd17cecb05c8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.996636] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2012.996860] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-813742aa-8053-40af-b6e0-6843e4ca7ea5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.999094] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2012.999273] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2013.000227] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12be1690-9faf-40b6-ba8c-384a4d71abff {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.005143] env[62813]: DEBUG oslo_vmware.api [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Waiting for the task: (returnval){ [ 2013.005143] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52cf12c2-d7bb-c198-a053-f821d816df2c" [ 2013.005143] env[62813]: _type = "Task" [ 2013.005143] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.015708] env[62813]: DEBUG oslo_vmware.api [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52cf12c2-d7bb-c198-a053-f821d816df2c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.070747] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2013.070996] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2013.071221] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Deleting the datastore file [datastore2] 2c94570a-7bb0-4719-9982-0e7710470db1 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2013.071494] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce6df8e5-4a48-44d5-9225-2fcd21df0471 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.078967] env[62813]: DEBUG oslo_vmware.api [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for the task: (returnval){ [ 2013.078967] env[62813]: value = "task-4267771" [ 2013.078967] env[62813]: _type = "Task" [ 2013.078967] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.086860] env[62813]: DEBUG oslo_vmware.api [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': task-4267771, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.516611] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2013.516974] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Creating directory with path [datastore2] vmware_temp/4f4c31d5-3ade-4a0f-845c-92aad197bf41/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2013.517160] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8ab5a2f-40bc-4671-b68f-d8e6a796dee5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.530164] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Created directory with path [datastore2] vmware_temp/4f4c31d5-3ade-4a0f-845c-92aad197bf41/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2013.530371] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Fetch image to [datastore2] vmware_temp/4f4c31d5-3ade-4a0f-845c-92aad197bf41/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2013.530547] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/4f4c31d5-3ade-4a0f-845c-92aad197bf41/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2013.531423] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb57212-3639-4941-9b44-9b8326327c91 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.538806] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d5a064-150b-4414-a593-5237b5310bb6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.548925] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d460a6-755d-472b-a4e4-29dcc4c50b35 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.584241] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c445864f-4a4a-41f1-8ee2-7ef4899adbf8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.592223] env[62813]: DEBUG oslo_vmware.api [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': task-4267771, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080038} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.594123] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2013.594345] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2013.594524] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2013.594702] env[62813]: INFO nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Took 0.61 seconds to destroy the instance on the hypervisor. 
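The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above follow the poll-until-done pattern used for vCenter tasks: wait_for_task() hands over the task reference and _poll_task() keeps reading its progress ("progress is 0%") until vCenter reports success or a fault such as InvalidArgument, which is then raised back to the caller. The short Python sketch below only illustrates that pattern; it is not Nova or oslo.vmware code, and the names poll_task_info and TaskFault are hypothetical stand-ins.

import time

class TaskFault(Exception):
    """Stand-in for a translated vCenter task fault (e.g. InvalidArgument)."""

def wait_for_task(poll_task_info, interval=0.5):
    """Poll a task-info callable until it reports success, raising on error."""
    while True:
        info = poll_task_info()            # e.g. {'state': 'running', 'progress': 0}
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise TaskFault(info.get('error', 'task failed'))
        time.sleep(interval)               # the real driver re-polls on a looping call

# A fake task that succeeds on the third poll:
_states = iter([{'state': 'running', 'progress': 0},
                {'state': 'running', 'progress': 50},
                {'state': 'success', 'progress': 100}])
print(wait_for_task(lambda: next(_states), interval=0))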
[ 2013.596908] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b7876075-271e-44f1-b721-ca934c68e228 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.599234] env[62813]: DEBUG nova.compute.claims [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2013.599424] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.599644] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2013.627444] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2013.771725] env[62813]: DEBUG oslo_vmware.rw_handles [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4f4c31d5-3ade-4a0f-845c-92aad197bf41/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2013.831232] env[62813]: DEBUG oslo_vmware.rw_handles [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2013.831449] env[62813]: DEBUG oslo_vmware.rw_handles [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4f4c31d5-3ade-4a0f-845c-92aad197bf41/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2013.854475] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924bbac0-d1cd-4525-94b5-297eaaafe91b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.862530] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cd0e42-ba21-4bcd-adbe-661353902bbb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.891275] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504de76d-128f-4939-b924-f0b8f6521cf6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.898293] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394d9a67-3ed0-4b4b-8025-3e1164a7189d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.912560] env[62813]: DEBUG nova.compute.provider_tree [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2013.922862] env[62813]: DEBUG nova.scheduler.client.report [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2013.936497] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.337s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2013.937069] env[62813]: ERROR nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2013.937069] env[62813]: Faults: ['InvalidArgument'] [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Traceback (most recent call last): [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 
2c94570a-7bb0-4719-9982-0e7710470db1] self.driver.spawn(context, instance, image_meta, [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] self._fetch_image_if_missing(context, vi) [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] image_cache(vi, tmp_image_ds_loc) [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] vm_util.copy_virtual_disk( [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] session._wait_for_task(vmdk_copy_task) [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] return self.wait_for_task(task_ref) [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] return evt.wait() [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] result = hub.switch() [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] return self.greenlet.switch() [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] self.f(*self.args, **self.kw) [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] raise exceptions.translate_fault(task_info.error) [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Faults: ['InvalidArgument'] [ 2013.937069] env[62813]: ERROR nova.compute.manager [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] [ 2013.937820] env[62813]: DEBUG nova.compute.utils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2013.939189] env[62813]: DEBUG nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Build of instance 2c94570a-7bb0-4719-9982-0e7710470db1 was re-scheduled: A specified parameter was not correct: fileType [ 2013.939189] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2013.939573] env[62813]: DEBUG nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2013.939746] env[62813]: DEBUG nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2013.939914] env[62813]: DEBUG nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2013.940086] env[62813]: DEBUG nova.network.neutron [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2014.254725] env[62813]: DEBUG nova.network.neutron [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2014.267048] env[62813]: INFO nova.compute.manager [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Took 0.33 seconds to deallocate network for instance. [ 2014.366044] env[62813]: INFO nova.scheduler.client.report [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Deleted allocations for instance 2c94570a-7bb0-4719-9982-0e7710470db1 [ 2014.387791] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a784f18-bf45-4805-92df-fbec2bd5d232 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "2c94570a-7bb0-4719-9982-0e7710470db1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 586.229s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.389018] env[62813]: DEBUG oslo_concurrency.lockutils [None req-08cfc1b9-d203-4a94-8db6-88b344523a80 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "2c94570a-7bb0-4719-9982-0e7710470db1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 390.012s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.389269] env[62813]: DEBUG oslo_concurrency.lockutils [None req-08cfc1b9-d203-4a94-8db6-88b344523a80 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "2c94570a-7bb0-4719-9982-0e7710470db1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.389481] env[62813]: DEBUG oslo_concurrency.lockutils [None req-08cfc1b9-d203-4a94-8db6-88b344523a80 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "2c94570a-7bb0-4719-9982-0e7710470db1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.389654] env[62813]: 
DEBUG oslo_concurrency.lockutils [None req-08cfc1b9-d203-4a94-8db6-88b344523a80 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "2c94570a-7bb0-4719-9982-0e7710470db1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.391682] env[62813]: INFO nova.compute.manager [None req-08cfc1b9-d203-4a94-8db6-88b344523a80 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Terminating instance [ 2014.393420] env[62813]: DEBUG nova.compute.manager [None req-08cfc1b9-d203-4a94-8db6-88b344523a80 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2014.393612] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-08cfc1b9-d203-4a94-8db6-88b344523a80 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2014.394099] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6860d995-53c3-44f7-9d18-410aaf16cdf6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.403432] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8d5943-5ad8-41ea-8f71-e20bd14e5939 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.414768] env[62813]: DEBUG nova.compute.manager [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2014.436524] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-08cfc1b9-d203-4a94-8db6-88b344523a80 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2c94570a-7bb0-4719-9982-0e7710470db1 could not be found. [ 2014.436818] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-08cfc1b9-d203-4a94-8db6-88b344523a80 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2014.437013] env[62813]: INFO nova.compute.manager [None req-08cfc1b9-d203-4a94-8db6-88b344523a80 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Took 0.04 seconds to destroy the instance on the hypervisor. 
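The "Acquiring lock" / "acquired ... waited" / "released ... held" records in this stretch are emitted by oslo.concurrency's lockutils, which Nova uses to serialize the resource tracker ("compute_resources"), per-instance termination, and per-instance event handling. A minimal use of the same primitive looks roughly like the following; this is an illustrative sketch assuming oslo.concurrency is installed, and the function is not Nova code:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_resource_view(node):
    # Only one thread/greenthread in this process runs this body at a time,
    # which is what the "compute_resources" acquired/released pairs around
    # the ResourceTracker methods in the log correspond to.
    return 'updated %s' % node

print(update_resource_view('domain-c8'))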
[ 2014.437295] env[62813]: DEBUG oslo.service.loopingcall [None req-08cfc1b9-d203-4a94-8db6-88b344523a80 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2014.437512] env[62813]: DEBUG nova.compute.manager [-] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2014.437608] env[62813]: DEBUG nova.network.neutron [-] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2014.470112] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.470428] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.472280] env[62813]: INFO nova.compute.claims [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2014.475793] env[62813]: DEBUG nova.network.neutron [-] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2014.488657] env[62813]: INFO nova.compute.manager [-] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] Took 0.05 seconds to deallocate network for instance. [ 2014.593774] env[62813]: DEBUG oslo_concurrency.lockutils [None req-08cfc1b9-d203-4a94-8db6-88b344523a80 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "2c94570a-7bb0-4719-9982-0e7710470db1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.205s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.595039] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "2c94570a-7bb0-4719-9982-0e7710470db1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 266.008s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.595368] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 2c94570a-7bb0-4719-9982-0e7710470db1] During sync_power_state the instance has a pending task (deleting). Skip. 
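The repeated "Inventory has not changed for provider ..." records carry the placement inventory this node reports, and the claim for instance 769103f0-9ebd-4a7a-825f-bf7456cb6eb9 succeeds against it. Placement's usable capacity per resource class is conventionally (total - reserved) * allocation_ratio; applying that to the values logged here shows why 10 allocated vCPUs out of 48 physical still leave ample headroom. The arithmetic below is a worked example based on the logged inventory, not output from this system:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 405,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print('%s: %g schedulable' % (rc, capacity))
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 405 -- so the 1 VCPU / 128 MB / 1 GB
# claim made for the m1.nano flavor fits comfortably.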
[ 2014.595543] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "2c94570a-7bb0-4719-9982-0e7710470db1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.687246] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83770b06-2080-43e0-8ce2-4ab364ddc6db {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.698432] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c67737-61fb-476b-a471-d742d85c1199 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.725993] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8620984-14c3-4315-ad7e-42756902ee94 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.734424] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa62196-7d0e-472a-b6e8-c331da3d33ab {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.748142] env[62813]: DEBUG nova.compute.provider_tree [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2014.757807] env[62813]: DEBUG nova.scheduler.client.report [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2014.771221] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.301s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.771790] env[62813]: DEBUG nova.compute.manager [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Start building networks asynchronously for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2014.805897] env[62813]: DEBUG nova.compute.utils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2014.807862] env[62813]: DEBUG nova.compute.manager [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2014.808635] env[62813]: DEBUG nova.network.neutron [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2014.821233] env[62813]: DEBUG nova.compute.manager [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2014.882575] env[62813]: DEBUG nova.policy [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd11f7ba345546efb48ffec7e5b3509f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '510017635b604af399ad050646569f48', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 2014.895329] env[62813]: DEBUG nova.compute.manager [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2014.921286] env[62813]: DEBUG nova.virt.hardware [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2014.921533] env[62813]: DEBUG nova.virt.hardware [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2014.921693] env[62813]: DEBUG nova.virt.hardware [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2014.921886] env[62813]: DEBUG nova.virt.hardware [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2014.922047] env[62813]: DEBUG nova.virt.hardware [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2014.922199] env[62813]: DEBUG nova.virt.hardware [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2014.922452] env[62813]: DEBUG nova.virt.hardware [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2014.922581] env[62813]: DEBUG nova.virt.hardware [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2014.922751] env[62813]: DEBUG nova.virt.hardware [None 
req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2014.922918] env[62813]: DEBUG nova.virt.hardware [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2014.923112] env[62813]: DEBUG nova.virt.hardware [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2014.923997] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4785b2d-7450-49df-82c9-008b4e0e6669 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.932983] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0eac2ef-4a24-4e9f-8011-653c54d41a9d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.443989] env[62813]: DEBUG nova.network.neutron [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Successfully created port: db3372bc-09ca-43aa-abec-510d7ff28ae3 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2016.155896] env[62813]: DEBUG nova.network.neutron [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Successfully updated port: db3372bc-09ca-43aa-abec-510d7ff28ae3 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2016.166994] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Acquiring lock "refresh_cache-769103f0-9ebd-4a7a-825f-bf7456cb6eb9" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.167149] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Acquired lock "refresh_cache-769103f0-9ebd-4a7a-825f-bf7456cb6eb9" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.167305] env[62813]: DEBUG nova.network.neutron [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2016.215682] env[62813]: DEBUG nova.network.neutron [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 
tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2016.382527] env[62813]: DEBUG nova.compute.manager [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Received event network-vif-plugged-db3372bc-09ca-43aa-abec-510d7ff28ae3 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2016.382862] env[62813]: DEBUG oslo_concurrency.lockutils [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] Acquiring lock "769103f0-9ebd-4a7a-825f-bf7456cb6eb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.383063] env[62813]: DEBUG oslo_concurrency.lockutils [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] Lock "769103f0-9ebd-4a7a-825f-bf7456cb6eb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.383558] env[62813]: DEBUG oslo_concurrency.lockutils [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] Lock "769103f0-9ebd-4a7a-825f-bf7456cb6eb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.383767] env[62813]: DEBUG nova.compute.manager [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] No waiting events found dispatching network-vif-plugged-db3372bc-09ca-43aa-abec-510d7ff28ae3 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2016.383937] env[62813]: WARNING nova.compute.manager [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Received unexpected event network-vif-plugged-db3372bc-09ca-43aa-abec-510d7ff28ae3 for instance with vm_state building and task_state spawning. [ 2016.384132] env[62813]: DEBUG nova.compute.manager [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Received event network-changed-db3372bc-09ca-43aa-abec-510d7ff28ae3 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2016.384286] env[62813]: DEBUG nova.compute.manager [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Refreshing instance network info cache due to event network-changed-db3372bc-09ca-43aa-abec-510d7ff28ae3. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2016.384693] env[62813]: DEBUG oslo_concurrency.lockutils [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] Acquiring lock "refresh_cache-769103f0-9ebd-4a7a-825f-bf7456cb6eb9" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.471887] env[62813]: DEBUG nova.network.neutron [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Updating instance_info_cache with network_info: [{"id": "db3372bc-09ca-43aa-abec-510d7ff28ae3", "address": "fa:16:3e:53:59:3e", "network": {"id": "85a1b681-36c7-4098-8f6c-667cc571afb4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1815219665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "510017635b604af399ad050646569f48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb3372bc-09", "ovs_interfaceid": "db3372bc-09ca-43aa-abec-510d7ff28ae3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2016.489688] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Releasing lock "refresh_cache-769103f0-9ebd-4a7a-825f-bf7456cb6eb9" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.490166] env[62813]: DEBUG nova.compute.manager [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Instance network_info: |[{"id": "db3372bc-09ca-43aa-abec-510d7ff28ae3", "address": "fa:16:3e:53:59:3e", "network": {"id": "85a1b681-36c7-4098-8f6c-667cc571afb4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1815219665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "510017635b604af399ad050646569f48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapdb3372bc-09", "ovs_interfaceid": "db3372bc-09ca-43aa-abec-510d7ff28ae3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2016.490602] env[62813]: DEBUG oslo_concurrency.lockutils [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] Acquired lock "refresh_cache-769103f0-9ebd-4a7a-825f-bf7456cb6eb9" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.490924] env[62813]: DEBUG nova.network.neutron [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Refreshing network info cache for port db3372bc-09ca-43aa-abec-510d7ff28ae3 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2016.492847] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:59:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '680cb499-2a47-482b-af0d-112016ac0e17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db3372bc-09ca-43aa-abec-510d7ff28ae3', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2016.505369] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Creating folder: Project (510017635b604af399ad050646569f48). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2016.506433] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f640a6d2-dea6-450c-81e7-8c807d27dc17 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.520574] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Created folder: Project (510017635b604af399ad050646569f48) in parent group-v840812. [ 2016.520771] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Creating folder: Instances. Parent ref: group-v840924. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2016.521919] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67670999-c2f8-46b2-b6b6-71fd3bc33b1b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.531189] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Created folder: Instances in parent group-v840924. 
[ 2016.532031] env[62813]: DEBUG oslo.service.loopingcall [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2016.532031] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2016.532031] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a376c072-6a47-4206-865f-53b581e62931 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.554307] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2016.554307] env[62813]: value = "task-4267774" [ 2016.554307] env[62813]: _type = "Task" [ 2016.554307] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.562987] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267774, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.841597] env[62813]: DEBUG nova.network.neutron [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Updated VIF entry in instance network info cache for port db3372bc-09ca-43aa-abec-510d7ff28ae3. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2016.842229] env[62813]: DEBUG nova.network.neutron [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Updating instance_info_cache with network_info: [{"id": "db3372bc-09ca-43aa-abec-510d7ff28ae3", "address": "fa:16:3e:53:59:3e", "network": {"id": "85a1b681-36c7-4098-8f6c-667cc571afb4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1815219665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "510017635b604af399ad050646569f48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb3372bc-09", "ovs_interfaceid": "db3372bc-09ca-43aa-abec-510d7ff28ae3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2016.853364] env[62813]: DEBUG oslo_concurrency.lockutils [req-87dfbce1-3d76-4e52-88db-2808e9506323 req-08d80811-2469-4688-ae40-d3bf20a12c73 service nova] Releasing lock "refresh_cache-769103f0-9ebd-4a7a-825f-bf7456cb6eb9" {{(pid=62813) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.064904] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267774, 'name': CreateVM_Task, 'duration_secs': 0.319663} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.065110] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2017.065826] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2017.066009] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2017.066431] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2017.066723] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ddcda96-8bdc-42c9-b194-d072bb4d308f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.072568] env[62813]: DEBUG oslo_vmware.api [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Waiting for the task: (returnval){ [ 2017.072568] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]529a79aa-065f-a07f-0d38-ac13ceb93db0" [ 2017.072568] env[62813]: _type = "Task" [ 2017.072568] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.088221] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.088489] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2017.088710] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2017.278078] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6f008691-ef89-416b-a4d9-d8ad7a53cdd1 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2020.432575] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "366200bc-8852-45a3-be8b-016265dbfed1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2020.433041] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "366200bc-8852-45a3-be8b-016265dbfed1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2027.832133] env[62813]: DEBUG oslo_concurrency.lockutils [None req-22dfe1b3-6496-48b0-bdf3-83cec4615e99 tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Acquiring lock "769103f0-9ebd-4a7a-825f-bf7456cb6eb9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2045.166334] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2045.166706] env[62813]: DEBUG nova.compute.manager [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2046.164464] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2046.164648] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Cleaning up deleted instances {{(pid=62813) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 2046.175783] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] There are 0 instances to clean {{(pid=62813) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 2048.176205] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2049.165752] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2052.166058] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2052.166058] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2052.166058] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2052.187534] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2052.187678] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2052.187825] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2052.187986] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2052.188125] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2052.188207] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2052.188306] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2052.188428] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2052.188550] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2052.188662] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2052.188783] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2054.164105] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2055.165062] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2055.165062] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2055.177182] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2055.177415] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2055.177584] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2055.177767] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2055.178911] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1652bf-d6fc-4e8f-863c-0de63b7e09fd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.187917] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d172468-3813-4db6-894f-2acba748cdb0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.201704] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28eedb80-66c9-4576-8bcb-a4da7b6dc278 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.208808] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666b708f-6de5-492b-b4f0-f4ea82d1e8a4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.239410] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180730MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2055.239610] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2055.239759] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2055.384955] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 176d5151-358a-4b90-9aff-064aa9648618 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2055.385139] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2055.385277] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0d095679-87c7-46f6-8869-42b0f22127e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2055.385407] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f7777e07-72df-4af1-8f22-ccb71db0e06a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2055.385534] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 63766a48-0d55-4261-9949-be3335ae8d0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2055.385655] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2055.385774] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7f344eb3-b1a2-454f-a647-2d9ec7da915f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2055.385892] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance dcc68892-3e75-4da9-975a-5b41c69205f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2055.386017] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0dba8b6e-7927-432c-bd13-f5ce58f0c991 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2055.386139] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 769103f0-9ebd-4a7a-825f-bf7456cb6eb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2055.398136] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 926a846d-f902-4ec3-898e-439f10b4ee68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2055.409608] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 366200bc-8852-45a3-be8b-016265dbfed1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2055.409917] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2055.410234] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2055.427009] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing inventories for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2055.441593] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Updating ProviderTree inventory for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2055.441779] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Updating inventory in ProviderTree for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2055.453512] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing aggregate associations for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2, aggregates: None {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2055.473286] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing trait associations for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2055.613979] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22db112-467e-432d-962d-741f4bb30334 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.621730] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ab9b6228-3b67-465b-b2ed-01e5fbbc144f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.651159] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72592161-b3b4-4903-85fc-cadb128fa4db {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.658991] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a954a21e-7ee7-4798-8814-73781caefa57 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.674215] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2055.685221] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2055.699645] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2055.699848] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.460s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2056.164086] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2056.164302] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Cleaning up deleted instances with incomplete migration {{(pid=62813) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 2057.164712] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2057.165127] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2061.172341] env[62813]: DEBUG 
oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.481110] env[62813]: WARNING oslo_vmware.rw_handles [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2062.481110] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2062.481110] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2062.481110] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2062.481110] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2062.481110] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 2062.481110] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2062.481110] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2062.481110] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2062.481110] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2062.481110] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2062.481110] env[62813]: ERROR oslo_vmware.rw_handles [ 2062.481742] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/4f4c31d5-3ade-4a0f-845c-92aad197bf41/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2062.483627] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2062.483886] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Copying Virtual Disk [datastore2] vmware_temp/4f4c31d5-3ade-4a0f-845c-92aad197bf41/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/4f4c31d5-3ade-4a0f-845c-92aad197bf41/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2062.484198] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-042577c6-7fff-43f8-a440-458ecbc564fc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.494955] 
env[62813]: DEBUG oslo_vmware.api [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Waiting for the task: (returnval){ [ 2062.494955] env[62813]: value = "task-4267775" [ 2062.494955] env[62813]: _type = "Task" [ 2062.494955] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.503900] env[62813]: DEBUG oslo_vmware.api [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Task: {'id': task-4267775, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.006351] env[62813]: DEBUG oslo_vmware.exceptions [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2063.006674] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.007264] env[62813]: ERROR nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2063.007264] env[62813]: Faults: ['InvalidArgument'] [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] Traceback (most recent call last): [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] yield resources [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] self.driver.spawn(context, instance, image_meta, [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] self._fetch_image_if_missing(context, vi) [ 2063.007264] env[62813]: ERROR 
nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] image_cache(vi, tmp_image_ds_loc) [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] vm_util.copy_virtual_disk( [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] session._wait_for_task(vmdk_copy_task) [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] return self.wait_for_task(task_ref) [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] return evt.wait() [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] result = hub.switch() [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] return self.greenlet.switch() [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] self.f(*self.args, **self.kw) [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] raise exceptions.translate_fault(task_info.error) [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] Faults: ['InvalidArgument'] [ 2063.007264] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] [ 2063.008264] env[62813]: INFO nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 
tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Terminating instance [ 2063.009194] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2063.009410] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2063.009650] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc248a66-0032-47eb-a04b-e712de1531ae {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.013021] env[62813]: DEBUG nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2063.013218] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2063.013962] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8df18ba-e042-436d-9fac-425c21c897c5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.021616] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2063.021898] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3512913d-46d4-42de-8ebf-80cd73d53371 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.024132] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2063.024312] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2063.025373] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9e2508d-8b8a-4631-9070-67a42272356f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.030291] env[62813]: DEBUG oslo_vmware.api [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Waiting for the task: (returnval){ [ 2063.030291] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52be89a3-22c5-fa75-1a37-2424777b7ff4" [ 2063.030291] env[62813]: _type = "Task" [ 2063.030291] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.038733] env[62813]: DEBUG oslo_vmware.api [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52be89a3-22c5-fa75-1a37-2424777b7ff4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.222306] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2063.222591] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2063.222756] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Deleting the datastore file [datastore2] 176d5151-358a-4b90-9aff-064aa9648618 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2063.223063] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-095c651f-e628-443e-a281-0e780be802c5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.229601] env[62813]: DEBUG oslo_vmware.api [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Waiting for the task: (returnval){ [ 2063.229601] env[62813]: value = "task-4267777" [ 2063.229601] env[62813]: _type = "Task" [ 2063.229601] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.238013] env[62813]: DEBUG oslo_vmware.api [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Task: {'id': task-4267777, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.541327] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2063.541720] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Creating directory with path [datastore2] vmware_temp/a8618a61-3eee-4deb-8ad8-59281447189f/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2063.541949] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cc1dd3a-b137-4fae-952a-30207e87c379 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.554555] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Created directory with path [datastore2] vmware_temp/a8618a61-3eee-4deb-8ad8-59281447189f/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2063.554837] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Fetch image to [datastore2] vmware_temp/a8618a61-3eee-4deb-8ad8-59281447189f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2063.554931] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/a8618a61-3eee-4deb-8ad8-59281447189f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2063.555745] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55e582f-a250-4402-bfad-393ccee39ff3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.563603] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebe913a-ba08-4e95-8d88-0ea0c8c32c05 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.573907] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f515f437-4e81-46d8-ba74-3f6e8c3c60e0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.608039] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-40cf5dc8-328d-481c-93bb-8b5821487a42 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.614878] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-774287f3-0454-43e3-9e9a-e324a281d04c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.643775] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2063.739809] env[62813]: DEBUG oslo_vmware.api [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Task: {'id': task-4267777, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082923} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.740105] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2063.740302] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2063.740482] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2063.740664] env[62813]: INFO nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Took 0.73 seconds to destroy the instance on the hypervisor. 
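Annotation: the recurring "Waiting for the task ... progress is 0% ... completed successfully" entries above come from oslo.vmware's task polling loop: vCenter calls such as DeleteDatastoreFile_Task return a task reference whose TaskInfo is polled until it reaches a terminal state. A minimal sketch of that pattern, using a hypothetical get_task_info() callable rather than the real oslo.vmware session API:

    import time

    # Terminal vSphere task states, as polled by the loop that logs
    # "progress is N%". get_task_info is a caller-supplied (hypothetical)
    # function returning an object with .state, .progress and .error,
    # the way a vCenter TaskInfo does.
    TERMINAL_STATES = {"success", "error"}

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while True:
            info = get_task_info(task_ref)
            print(f"Task {task_ref}: state={info.state} progress={info.progress}%")
            if info.state in TERMINAL_STATES:
                if info.state == "error":
                    raise RuntimeError(f"Task {task_ref} failed: {info.error}")
                return info
            if time.monotonic() > deadline:
                raise TimeoutError(f"Task {task_ref} did not finish in {timeout}s")
            time.sleep(poll_interval)

This is only a sketch of the polling behaviour visible in the log, not the oslo.vmware implementation.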
[ 2063.743155] env[62813]: DEBUG nova.compute.claims [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2063.743337] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2063.743558] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2063.792266] env[62813]: DEBUG oslo_vmware.rw_handles [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a8618a61-3eee-4deb-8ad8-59281447189f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2063.853066] env[62813]: DEBUG oslo_vmware.rw_handles [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2063.853066] env[62813]: DEBUG oslo_vmware.rw_handles [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a8618a61-3eee-4deb-8ad8-59281447189f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2064.008142] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1169d2d1-8e88-4741-be14-91bf031fd472 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.016009] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03fa0c9a-e526-44fc-91bd-a7cf8797ee8e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.045961] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92270d0-c439-401c-8245-e9bf4af2e0a3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.053743] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d0d0b9-c155-4baa-bf69-73d7c8cc9afa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.068361] env[62813]: DEBUG nova.compute.provider_tree [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2064.077016] env[62813]: DEBUG nova.scheduler.client.report [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2064.092025] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.348s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.092647] env[62813]: ERROR nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2064.092647] env[62813]: Faults: ['InvalidArgument'] [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] Traceback (most recent call last): [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2064.092647] 
env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] self.driver.spawn(context, instance, image_meta, [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] self._fetch_image_if_missing(context, vi) [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] image_cache(vi, tmp_image_ds_loc) [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] vm_util.copy_virtual_disk( [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] session._wait_for_task(vmdk_copy_task) [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] return self.wait_for_task(task_ref) [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] return evt.wait() [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] result = hub.switch() [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] return self.greenlet.switch() [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] self.f(*self.args, **self.kw) [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] raise exceptions.translate_fault(task_info.error) [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] Faults: ['InvalidArgument'] [ 2064.092647] env[62813]: ERROR nova.compute.manager [instance: 176d5151-358a-4b90-9aff-064aa9648618] [ 2064.093427] env[62813]: DEBUG nova.compute.utils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2064.094944] env[62813]: DEBUG nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Build of instance 176d5151-358a-4b90-9aff-064aa9648618 was re-scheduled: A specified parameter was not correct: fileType [ 2064.094944] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2064.095335] env[62813]: DEBUG nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2064.095512] env[62813]: DEBUG nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2064.095684] env[62813]: DEBUG nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2064.095844] env[62813]: DEBUG nova.network.neutron [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2064.465177] env[62813]: DEBUG nova.network.neutron [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2064.477088] env[62813]: INFO nova.compute.manager [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Took 0.38 seconds to deallocate network for instance. [ 2064.569884] env[62813]: INFO nova.scheduler.client.report [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Deleted allocations for instance 176d5151-358a-4b90-9aff-064aa9648618 [ 2064.594357] env[62813]: DEBUG oslo_concurrency.lockutils [None req-034852c9-6e60-4e5c-b9d6-e299b1777d16 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "176d5151-358a-4b90-9aff-064aa9648618" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 676.571s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.595613] env[62813]: DEBUG oslo_concurrency.lockutils [None req-30c4d3cc-8cd6-4135-931a-9f0253aef1b0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "176d5151-358a-4b90-9aff-064aa9648618" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 480.819s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.595842] env[62813]: DEBUG oslo_concurrency.lockutils [None req-30c4d3cc-8cd6-4135-931a-9f0253aef1b0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Acquiring lock "176d5151-358a-4b90-9aff-064aa9648618-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.596067] env[62813]: DEBUG oslo_concurrency.lockutils [None req-30c4d3cc-8cd6-4135-931a-9f0253aef1b0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "176d5151-358a-4b90-9aff-064aa9648618-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.596254] env[62813]: DEBUG oslo_concurrency.lockutils [None req-30c4d3cc-8cd6-4135-931a-9f0253aef1b0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "176d5151-358a-4b90-9aff-064aa9648618-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.598279] env[62813]: INFO nova.compute.manager [None req-30c4d3cc-8cd6-4135-931a-9f0253aef1b0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Terminating instance [ 2064.600026] env[62813]: DEBUG nova.compute.manager [None req-30c4d3cc-8cd6-4135-931a-9f0253aef1b0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2064.600229] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-30c4d3cc-8cd6-4135-931a-9f0253aef1b0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2064.600737] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-327f7a6c-4278-461c-8bf4-e451954dfe43 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.614782] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1008bca-8413-4d97-a7bb-72e4315a46b7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.635622] env[62813]: DEBUG nova.compute.manager [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2064.658799] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-30c4d3cc-8cd6-4135-931a-9f0253aef1b0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 176d5151-358a-4b90-9aff-064aa9648618 could not be found. 
[ 2064.660049] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-30c4d3cc-8cd6-4135-931a-9f0253aef1b0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2064.660049] env[62813]: INFO nova.compute.manager [None req-30c4d3cc-8cd6-4135-931a-9f0253aef1b0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Took 0.06 seconds to destroy the instance on the hypervisor. [ 2064.660049] env[62813]: DEBUG oslo.service.loopingcall [None req-30c4d3cc-8cd6-4135-931a-9f0253aef1b0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2064.660049] env[62813]: DEBUG nova.compute.manager [-] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2064.660049] env[62813]: DEBUG nova.network.neutron [-] [instance: 176d5151-358a-4b90-9aff-064aa9648618] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2064.688923] env[62813]: DEBUG nova.network.neutron [-] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2064.694209] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.694463] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.696757] env[62813]: INFO nova.compute.claims [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2064.700189] env[62813]: INFO nova.compute.manager [-] [instance: 176d5151-358a-4b90-9aff-064aa9648618] Took 0.04 seconds to deallocate network for instance. 
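Annotation: the "Inventory has not changed for provider ... based on inventory data" entries dump the resource provider inventory the resource tracker reports to Placement; each resource class carries total, reserved, min_unit/max_unit, step_size and allocation_ratio, and the capacity Placement schedules against is (total - reserved) * allocation_ratio. A quick check against the inventory logged here:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 405,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 405.0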
[ 2064.801763] env[62813]: DEBUG oslo_concurrency.lockutils [None req-30c4d3cc-8cd6-4135-931a-9f0253aef1b0 tempest-AttachVolumeShelveTestJSON-56149757 tempest-AttachVolumeShelveTestJSON-56149757-project-member] Lock "176d5151-358a-4b90-9aff-064aa9648618" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.206s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.803032] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "176d5151-358a-4b90-9aff-064aa9648618" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 316.216s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.803179] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 176d5151-358a-4b90-9aff-064aa9648618] During sync_power_state the instance has a pending task (deleting). Skip. [ 2064.803429] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "176d5151-358a-4b90-9aff-064aa9648618" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.934265] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87e639d-9e60-4b3a-8231-17b7c714ed44 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.942916] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50948afa-0c2a-4ab9-bbd1-05dcc9e62c65 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.975853] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e621346f-936e-474b-a165-aaeedfde410a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.983957] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d905c60c-e12c-47d7-a35d-066a604546a8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.997876] env[62813]: DEBUG nova.compute.provider_tree [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2065.006880] env[62813]: DEBUG nova.scheduler.client.report [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 
'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2065.021767] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.327s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.022330] env[62813]: DEBUG nova.compute.manager [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2065.058804] env[62813]: DEBUG nova.compute.utils [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2065.064030] env[62813]: DEBUG nova.compute.manager [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2065.064030] env[62813]: DEBUG nova.network.neutron [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2065.073678] env[62813]: DEBUG nova.compute.manager [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2065.144142] env[62813]: DEBUG nova.compute.manager [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2065.173453] env[62813]: DEBUG nova.virt.hardware [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2065.173728] env[62813]: DEBUG nova.virt.hardware [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2065.173950] env[62813]: DEBUG nova.virt.hardware [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2065.174180] env[62813]: DEBUG nova.virt.hardware [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2065.174374] env[62813]: DEBUG nova.virt.hardware [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2065.174542] env[62813]: DEBUG nova.virt.hardware [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2065.174777] env[62813]: DEBUG nova.virt.hardware [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2065.174937] env[62813]: DEBUG nova.virt.hardware [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2065.175128] 
env[62813]: DEBUG nova.virt.hardware [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2065.175300] env[62813]: DEBUG nova.virt.hardware [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2065.175599] env[62813]: DEBUG nova.virt.hardware [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2065.176543] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0322ac97-c5db-487d-afe7-b79e5c48167a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.186207] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3502fc-df24-450d-8975-500d16ce6151 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.320251] env[62813]: DEBUG nova.policy [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8176143266e84a458bfc13cc66983203', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87677daaac17461a97538cd8740330f2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 2065.648760] env[62813]: DEBUG nova.network.neutron [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Successfully created port: d3371798-9d46-45af-b470-c5bdcc395f54 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2066.407317] env[62813]: DEBUG nova.network.neutron [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Successfully updated port: d3371798-9d46-45af-b470-c5bdcc395f54 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2066.421244] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquiring lock "refresh_cache-926a846d-f902-4ec3-898e-439f10b4ee68" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2066.421244] env[62813]: DEBUG oslo_concurrency.lockutils 
[None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquired lock "refresh_cache-926a846d-f902-4ec3-898e-439f10b4ee68" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.421244] env[62813]: DEBUG nova.network.neutron [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2066.463360] env[62813]: DEBUG nova.network.neutron [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2066.509012] env[62813]: DEBUG nova.compute.manager [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Received event network-vif-plugged-d3371798-9d46-45af-b470-c5bdcc395f54 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2066.509305] env[62813]: DEBUG oslo_concurrency.lockutils [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] Acquiring lock "926a846d-f902-4ec3-898e-439f10b4ee68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2066.509550] env[62813]: DEBUG oslo_concurrency.lockutils [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] Lock "926a846d-f902-4ec3-898e-439f10b4ee68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2066.509745] env[62813]: DEBUG oslo_concurrency.lockutils [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] Lock "926a846d-f902-4ec3-898e-439f10b4ee68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2066.509878] env[62813]: DEBUG nova.compute.manager [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] No waiting events found dispatching network-vif-plugged-d3371798-9d46-45af-b470-c5bdcc395f54 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2066.510102] env[62813]: WARNING nova.compute.manager [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Received unexpected event network-vif-plugged-d3371798-9d46-45af-b470-c5bdcc395f54 for instance with vm_state building and task_state spawning. 
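Annotation: the network-vif-plugged / network-changed entries are Neutron-originated external events delivered to the compute manager; the "No waiting events found dispatching ... Received unexpected event" warning just means no spawning thread had registered a waiter for that (event, port) pair yet, which is normal while vm_state is still building. A simplified, hypothetical model of that per-instance event registry (not Nova's actual implementation):

    import threading
    from collections import defaultdict

    class InstanceEventRegistry:
        """Toy model of waiting for external events such as
        'network-vif-plugged-<port-id>'. All names are illustrative."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)  # instance_uuid -> {event_key: Event}

        def prepare(self, instance_uuid, event_key):
            # Called by the thread that will wait for the event.
            with self._lock:
                ev = threading.Event()
                self._waiters[instance_uuid][event_key] = ev
                return ev

        def pop(self, instance_uuid, event_key):
            # Called when an external event arrives. Returning None is the
            # situation in which an "unexpected event" warning would be logged.
            with self._lock:
                return self._waiters[instance_uuid].pop(event_key, None)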
[ 2066.510287] env[62813]: DEBUG nova.compute.manager [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Received event network-changed-d3371798-9d46-45af-b470-c5bdcc395f54 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2066.510458] env[62813]: DEBUG nova.compute.manager [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Refreshing instance network info cache due to event network-changed-d3371798-9d46-45af-b470-c5bdcc395f54. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2066.510629] env[62813]: DEBUG oslo_concurrency.lockutils [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] Acquiring lock "refresh_cache-926a846d-f902-4ec3-898e-439f10b4ee68" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2066.705664] env[62813]: DEBUG nova.network.neutron [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Updating instance_info_cache with network_info: [{"id": "d3371798-9d46-45af-b470-c5bdcc395f54", "address": "fa:16:3e:2f:68:36", "network": {"id": "3e659b4f-671d-4144-82d6-4e80fcf365c9", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1216202715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87677daaac17461a97538cd8740330f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3371798-9d", "ovs_interfaceid": "d3371798-9d46-45af-b470-c5bdcc395f54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2066.718288] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Releasing lock "refresh_cache-926a846d-f902-4ec3-898e-439f10b4ee68" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2066.718645] env[62813]: DEBUG nova.compute.manager [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Instance network_info: |[{"id": "d3371798-9d46-45af-b470-c5bdcc395f54", "address": "fa:16:3e:2f:68:36", "network": {"id": "3e659b4f-671d-4144-82d6-4e80fcf365c9", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1216202715-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87677daaac17461a97538cd8740330f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3371798-9d", "ovs_interfaceid": "d3371798-9d46-45af-b470-c5bdcc395f54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2066.718975] env[62813]: DEBUG oslo_concurrency.lockutils [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] Acquired lock "refresh_cache-926a846d-f902-4ec3-898e-439f10b4ee68" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.719179] env[62813]: DEBUG nova.network.neutron [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Refreshing network info cache for port d3371798-9d46-45af-b470-c5bdcc395f54 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2066.720545] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:68:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b5f9472-1844-4c99-8804-8f193cfff562', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3371798-9d46-45af-b470-c5bdcc395f54', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2066.728066] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Creating folder: Project (87677daaac17461a97538cd8740330f2). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2066.728965] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e9bc529-bcf5-42cf-879e-a36d81bf9a39 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.741339] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Created folder: Project (87677daaac17461a97538cd8740330f2) in parent group-v840812. 
[ 2066.741573] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Creating folder: Instances. Parent ref: group-v840927. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2066.741817] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f76340fb-54d7-4591-8d7a-9dafeb4861fe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.750379] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Created folder: Instances in parent group-v840927. [ 2066.750621] env[62813]: DEBUG oslo.service.loopingcall [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2066.750806] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2066.751016] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-096d12bc-47f0-413b-a363-814b948e21d4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.772946] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2066.772946] env[62813]: value = "task-4267780" [ 2066.772946] env[62813]: _type = "Task" [ 2066.772946] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.781324] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267780, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.009633] env[62813]: DEBUG nova.network.neutron [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Updated VIF entry in instance network info cache for port d3371798-9d46-45af-b470-c5bdcc395f54. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2067.010251] env[62813]: DEBUG nova.network.neutron [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Updating instance_info_cache with network_info: [{"id": "d3371798-9d46-45af-b470-c5bdcc395f54", "address": "fa:16:3e:2f:68:36", "network": {"id": "3e659b4f-671d-4144-82d6-4e80fcf365c9", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1216202715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87677daaac17461a97538cd8740330f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3371798-9d", "ovs_interfaceid": "d3371798-9d46-45af-b470-c5bdcc395f54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.022536] env[62813]: DEBUG oslo_concurrency.lockutils [req-d2de25fe-7174-48e1-98ee-e3316ad1686b req-48f0397f-a6b2-4f07-82c0-41ab22b1df13 service nova] Releasing lock "refresh_cache-926a846d-f902-4ec3-898e-439f10b4ee68" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2067.283014] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267780, 'name': CreateVM_Task, 'duration_secs': 0.312362} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.283578] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2067.284311] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2067.284472] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2067.284787] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2067.285052] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4083ab9b-1b0a-410d-be81-4a58465e6654 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.290198] env[62813]: DEBUG oslo_vmware.api [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Waiting for the task: (returnval){ [ 2067.290198] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52ed35c3-84e9-1fc1-3b6e-ab4c23243c54" [ 2067.290198] env[62813]: _type = "Task" [ 2067.290198] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.299440] env[62813]: DEBUG oslo_vmware.api [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52ed35c3-84e9-1fc1-3b6e-ab4c23243c54, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.800950] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2067.801319] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2067.801451] env[62813]: DEBUG oslo_concurrency.lockutils [None req-3fd415f3-cb13-4625-aed5-5d238e41c51e tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.164721] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2106.165054] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2108.165324] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2109.163296] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2110.967545] env[62813]: WARNING oslo_vmware.rw_handles [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2110.967545] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2110.967545] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2110.967545] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2110.967545] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2110.967545] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 2110.967545] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2110.967545] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2110.967545] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2110.967545] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2110.967545] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2110.967545] env[62813]: ERROR oslo_vmware.rw_handles [ 2110.968225] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/a8618a61-3eee-4deb-8ad8-59281447189f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2110.969814] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2110.970074] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Copying Virtual Disk [datastore2] vmware_temp/a8618a61-3eee-4deb-8ad8-59281447189f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] 
vmware_temp/a8618a61-3eee-4deb-8ad8-59281447189f/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2110.970368] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7b59e2e-9f8a-4fe8-9abc-f5bfc4e0bc35 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.979087] env[62813]: DEBUG oslo_vmware.api [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Waiting for the task: (returnval){ [ 2110.979087] env[62813]: value = "task-4267781" [ 2110.979087] env[62813]: _type = "Task" [ 2110.979087] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.987374] env[62813]: DEBUG oslo_vmware.api [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Task: {'id': task-4267781, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.489803] env[62813]: DEBUG oslo_vmware.exceptions [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2111.489973] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2111.490536] env[62813]: ERROR nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2111.490536] env[62813]: Faults: ['InvalidArgument'] [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Traceback (most recent call last): [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] yield resources [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] self.driver.spawn(context, instance, image_meta, [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2111.490536] 
env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] self._fetch_image_if_missing(context, vi) [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] image_cache(vi, tmp_image_ds_loc) [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] vm_util.copy_virtual_disk( [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] session._wait_for_task(vmdk_copy_task) [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] return self.wait_for_task(task_ref) [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] return evt.wait() [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] result = hub.switch() [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] return self.greenlet.switch() [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] self.f(*self.args, **self.kw) [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] raise exceptions.translate_fault(task_info.error) [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: 
cd5b7232-5d47-43c6-874e-6f9e6b45f420] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Faults: ['InvalidArgument'] [ 2111.490536] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] [ 2111.491481] env[62813]: INFO nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Terminating instance [ 2111.492738] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2111.492738] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2111.492941] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7328bbdd-9265-4aae-aa07-2ea103b3e19b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.495148] env[62813]: DEBUG nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Start destroying the instance on the hypervisor. 
{{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2111.495347] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2111.496084] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b61650b-d7ab-4798-933c-5e1a06ee46ce {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.504059] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2111.504059] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80283249-5c27-4b24-b876-c6d0ae90e034 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.506319] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2111.506534] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2111.507522] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d26c59cf-8d9e-4b5b-b5f8-907e26ac31e8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.512311] env[62813]: DEBUG oslo_vmware.api [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Waiting for the task: (returnval){ [ 2111.512311] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52d948f7-06cc-7724-79f7-e795eb818ca7" [ 2111.512311] env[62813]: _type = "Task" [ 2111.512311] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.520521] env[62813]: DEBUG oslo_vmware.api [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52d948f7-06cc-7724-79f7-e795eb818ca7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.585079] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2111.585292] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2111.585476] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Deleting the datastore file [datastore2] cd5b7232-5d47-43c6-874e-6f9e6b45f420 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2111.585745] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8ed953e-c69d-4fe9-81a9-4784c50bc3c4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.592549] env[62813]: DEBUG oslo_vmware.api [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Waiting for the task: (returnval){ [ 2111.592549] env[62813]: value = "task-4267783" [ 2111.592549] env[62813]: _type = "Task" [ 2111.592549] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.600729] env[62813]: DEBUG oslo_vmware.api [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Task: {'id': task-4267783, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.023802] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2112.024197] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Creating directory with path [datastore2] vmware_temp/6cf6d72c-5746-4f24-a642-48d9f5dc0312/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2112.024314] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81e65783-e0a8-495c-a526-58e2180bc9cc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.036747] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Created directory with path [datastore2] vmware_temp/6cf6d72c-5746-4f24-a642-48d9f5dc0312/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2112.036971] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Fetch image to [datastore2] vmware_temp/6cf6d72c-5746-4f24-a642-48d9f5dc0312/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2112.037169] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/6cf6d72c-5746-4f24-a642-48d9f5dc0312/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2112.037952] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a95d400-8f4b-46b1-b99d-85946ee131c5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.045819] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdbdf1e-6432-4eb2-bcca-058ca57ffcdd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.055862] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a50b548-3bf5-4ba7-95d3-60bb62c622f7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.088378] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ea065e05-b6da-4483-a9f0-7041bb6d73fb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.097971] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-71e4a2e3-d7b5-4a70-8f42-26ff6e2563d7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.104804] env[62813]: DEBUG oslo_vmware.api [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Task: {'id': task-4267783, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066215} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.105141] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2112.105247] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2112.105424] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2112.105633] env[62813]: INFO nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Took 0.61 seconds to destroy the instance on the hypervisor. 
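The SearchDatastore_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: a SOAP method is invoked through the API session, which hands back a Task managed-object reference, and wait_for_task() then polls it (the repeated "progress is 0%" / "completed successfully" lines) until it succeeds or a fault such as InvalidArgument is translated into a Python exception. A minimal sketch of that pattern follows; the host, credentials and datastore path are placeholders, not values taken from this log.

from oslo_vmware import api as vmware_api

# Placeholder vCenter endpoint and credentials (not from this log).
session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'svc-user', 'secret',
    api_retry_count=3, task_poll_interval=0.5)

file_manager = session.vim.service_content.fileManager

# invoke_api() issues the SOAP call and returns a Task moref.
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] example-instance-dir',
    datacenter=None)  # real callers pass the datacenter moref here

# wait_for_task() drives the _poll_task() loop seen in the log and raises a
# translated fault (e.g. VimFaultException) if the task ends in error.
task_info = session.wait_for_task(task)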
[ 2112.107897] env[62813]: DEBUG nova.compute.claims [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2112.108094] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2112.108325] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.126212] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2112.263722] env[62813]: DEBUG oslo_vmware.rw_handles [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6cf6d72c-5746-4f24-a642-48d9f5dc0312/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2112.325843] env[62813]: DEBUG oslo_vmware.rw_handles [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2112.326045] env[62813]: DEBUG oslo_vmware.rw_handles [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6cf6d72c-5746-4f24-a642-48d9f5dc0312/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2112.359061] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6805994e-ff6f-45a9-b0a8-128d3fac117d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.366471] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2454d6c7-c00c-4463-816d-024a4123610f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.397214] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf2fb8f-8db1-47e5-af89-fde0e7bf4e47 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.404991] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea16b8a7-beea-4d2a-91e6-863984f56512 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.418538] env[62813]: DEBUG nova.compute.provider_tree [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2112.428669] env[62813]: DEBUG nova.scheduler.client.report [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2112.442787] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.334s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2112.443319] env[62813]: ERROR nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2112.443319] env[62813]: Faults: ['InvalidArgument'] [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Traceback (most recent call last): [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] self.driver.spawn(context, instance, image_meta, [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] self._fetch_image_if_missing(context, vi) [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] image_cache(vi, tmp_image_ds_loc) [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] vm_util.copy_virtual_disk( [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] session._wait_for_task(vmdk_copy_task) [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] return self.wait_for_task(task_ref) [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] return evt.wait() [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] result = hub.switch() [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] return self.greenlet.switch() [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] self.f(*self.args, **self.kw) [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: 
cd5b7232-5d47-43c6-874e-6f9e6b45f420] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] raise exceptions.translate_fault(task_info.error) [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Faults: ['InvalidArgument'] [ 2112.443319] env[62813]: ERROR nova.compute.manager [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] [ 2112.444171] env[62813]: DEBUG nova.compute.utils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2112.445533] env[62813]: DEBUG nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Build of instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 was re-scheduled: A specified parameter was not correct: fileType [ 2112.445533] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2112.445909] env[62813]: DEBUG nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2112.446159] env[62813]: DEBUG nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2112.446362] env[62813]: DEBUG nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2112.446538] env[62813]: DEBUG nova.network.neutron [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2113.064066] env[62813]: DEBUG nova.network.neutron [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2113.079464] env[62813]: INFO nova.compute.manager [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Took 0.63 seconds to deallocate network for instance. [ 2113.187743] env[62813]: INFO nova.scheduler.client.report [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Deleted allocations for instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 [ 2113.214203] env[62813]: DEBUG oslo_concurrency.lockutils [None req-31765d1f-5046-4979-a88b-adb4af749548 tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 679.254s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.215813] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0726e34d-5825-46f8-8c29-298d7af4247f tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 484.006s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2113.215813] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0726e34d-5825-46f8-8c29-298d7af4247f tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Acquiring lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2113.215987] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0726e34d-5825-46f8-8c29-298d7af4247f tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2113.220025] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0726e34d-5825-46f8-8c29-298d7af4247f tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.220955] env[62813]: INFO nova.compute.manager [None req-0726e34d-5825-46f8-8c29-298d7af4247f tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Terminating instance [ 2113.224421] env[62813]: DEBUG nova.compute.manager [None req-0726e34d-5825-46f8-8c29-298d7af4247f tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2113.225550] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0726e34d-5825-46f8-8c29-298d7af4247f tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2113.226263] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99492d0b-7183-41a3-8886-ed2fe605ffa0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.230696] env[62813]: DEBUG nova.compute.manager [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2113.242675] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d298ee-238c-4fcc-97fd-b9351b0e104f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.272465] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-0726e34d-5825-46f8-8c29-298d7af4247f tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cd5b7232-5d47-43c6-874e-6f9e6b45f420 could not be found. 
[ 2113.272688] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-0726e34d-5825-46f8-8c29-298d7af4247f tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2113.272881] env[62813]: INFO nova.compute.manager [None req-0726e34d-5825-46f8-8c29-298d7af4247f tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2113.273200] env[62813]: DEBUG oslo.service.loopingcall [None req-0726e34d-5825-46f8-8c29-298d7af4247f tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2113.274076] env[62813]: DEBUG nova.compute.manager [-] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2113.274171] env[62813]: DEBUG nova.network.neutron [-] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2113.290882] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2113.291153] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2113.292812] env[62813]: INFO nova.compute.claims [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2113.306141] env[62813]: DEBUG nova.network.neutron [-] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2113.323855] env[62813]: INFO nova.compute.manager [-] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] Took 0.05 seconds to deallocate network for instance. 
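The "Waiting for function ... _deallocate_network_with_retries to return" entry above is oslo.service's looping-call helper at work: a worker function is re-invoked on an interval until it signals completion by raising LoopingCallDone. A minimal sketch of that mechanism with a stand-in worker (this is not Nova's actual network-deallocation retry logic):

from oslo_service import loopingcall

attempts = {'count': 0}

def _worker():
    attempts['count'] += 1
    if attempts['count'] < 3:
        return  # not finished; the loop calls us again after the interval
    # Raising LoopingCallDone stops the loop and hands back a result.
    raise loopingcall.LoopingCallDone(retvalue='deallocated')

timer = loopingcall.FixedIntervalLoopingCall(_worker)
result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone
assert result == 'deallocated'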
[ 2113.428231] env[62813]: DEBUG oslo_concurrency.lockutils [None req-0726e34d-5825-46f8-8c29-298d7af4247f tempest-InstanceActionsV221TestJSON-703471036 tempest-InstanceActionsV221TestJSON-703471036-project-member] Lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.213s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.429582] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 364.842s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2113.429582] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: cd5b7232-5d47-43c6-874e-6f9e6b45f420] During sync_power_state the instance has a pending task (deleting). Skip. [ 2113.429582] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "cd5b7232-5d47-43c6-874e-6f9e6b45f420" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.482521] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47096eed-85db-49ee-9292-519a37b71333 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.490667] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6c563b-1703-4adf-9e5f-090279e0a6aa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.522113] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a911a15-ad6d-4039-9631-24a723a19703 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.529972] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db576225-96a6-4623-a561-f614197ac4c3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.543386] env[62813]: DEBUG nova.compute.provider_tree [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2113.553278] env[62813]: DEBUG nova.scheduler.client.report [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 
'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2113.569446] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.278s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.569937] env[62813]: DEBUG nova.compute.manager [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2113.606063] env[62813]: DEBUG nova.compute.utils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2113.607848] env[62813]: DEBUG nova.compute.manager [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2113.607848] env[62813]: DEBUG nova.network.neutron [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2113.619113] env[62813]: DEBUG nova.compute.manager [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2113.676180] env[62813]: DEBUG nova.policy [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e577489b4e784e5abaa6a755ab08a2c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d2731f99cdc4553bd301f33c4df1517', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 2113.688087] env[62813]: DEBUG nova.compute.manager [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2113.719658] env[62813]: DEBUG nova.virt.hardware [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2113.719841] env[62813]: DEBUG nova.virt.hardware [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2113.720077] env[62813]: DEBUG nova.virt.hardware [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2113.720345] env[62813]: DEBUG nova.virt.hardware [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2113.720511] env[62813]: DEBUG nova.virt.hardware [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2113.720675] env[62813]: DEBUG nova.virt.hardware [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2113.720932] env[62813]: DEBUG nova.virt.hardware [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2113.722109] env[62813]: DEBUG nova.virt.hardware [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2113.722109] env[62813]: DEBUG nova.virt.hardware [None 
req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2113.722109] env[62813]: DEBUG nova.virt.hardware [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2113.722109] env[62813]: DEBUG nova.virt.hardware [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2113.724949] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1b53ae-b0bf-424a-b97e-7c72bebd38d6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.734131] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7c6024-ec08-4311-9199-48e6321e494c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.106325] env[62813]: DEBUG nova.network.neutron [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Successfully created port: 3207e62f-8b9e-4ed6-b002-3a23c6011a06 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2114.165889] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2114.165889] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2114.165889] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2114.190173] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2114.190173] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2114.190173] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2114.190173] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2114.190173] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2114.190173] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2114.190173] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2114.190173] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2114.190173] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2114.190173] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2114.190173] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2114.840721] env[62813]: DEBUG nova.network.neutron [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Successfully updated port: 3207e62f-8b9e-4ed6-b002-3a23c6011a06 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2114.853567] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "refresh_cache-366200bc-8852-45a3-be8b-016265dbfed1" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.853755] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquired lock "refresh_cache-366200bc-8852-45a3-be8b-016265dbfed1" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.853862] env[62813]: DEBUG nova.network.neutron [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2114.895729] env[62813]: DEBUG nova.network.neutron [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2115.162183] env[62813]: DEBUG nova.compute.manager [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Received event network-vif-plugged-3207e62f-8b9e-4ed6-b002-3a23c6011a06 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2115.162425] env[62813]: DEBUG oslo_concurrency.lockutils [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] Acquiring lock "366200bc-8852-45a3-be8b-016265dbfed1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2115.162628] env[62813]: DEBUG oslo_concurrency.lockutils [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] Lock "366200bc-8852-45a3-be8b-016265dbfed1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.162801] env[62813]: DEBUG oslo_concurrency.lockutils [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] Lock "366200bc-8852-45a3-be8b-016265dbfed1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2115.163103] env[62813]: DEBUG nova.compute.manager [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] No waiting events found dispatching network-vif-plugged-3207e62f-8b9e-4ed6-b002-3a23c6011a06 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2115.163200] env[62813]: WARNING nova.compute.manager [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Received unexpected event network-vif-plugged-3207e62f-8b9e-4ed6-b002-3a23c6011a06 for instance with vm_state building and task_state spawning. [ 2115.163355] env[62813]: DEBUG nova.compute.manager [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Received event network-changed-3207e62f-8b9e-4ed6-b002-3a23c6011a06 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2115.163512] env[62813]: DEBUG nova.compute.manager [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Refreshing instance network info cache due to event network-changed-3207e62f-8b9e-4ed6-b002-3a23c6011a06. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2115.163680] env[62813]: DEBUG oslo_concurrency.lockutils [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] Acquiring lock "refresh_cache-366200bc-8852-45a3-be8b-016265dbfed1" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.170085] env[62813]: DEBUG nova.network.neutron [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Updating instance_info_cache with network_info: [{"id": "3207e62f-8b9e-4ed6-b002-3a23c6011a06", "address": "fa:16:3e:dc:91:bb", "network": {"id": "0f261672-1246-4a37-8b67-a9d24d314571", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-158054522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d2731f99cdc4553bd301f33c4df1517", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3207e62f-8b", "ovs_interfaceid": "3207e62f-8b9e-4ed6-b002-3a23c6011a06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2115.181674] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Releasing lock "refresh_cache-366200bc-8852-45a3-be8b-016265dbfed1" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.181995] env[62813]: DEBUG nova.compute.manager [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Instance network_info: |[{"id": "3207e62f-8b9e-4ed6-b002-3a23c6011a06", "address": "fa:16:3e:dc:91:bb", "network": {"id": "0f261672-1246-4a37-8b67-a9d24d314571", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-158054522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d2731f99cdc4553bd301f33c4df1517", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tap3207e62f-8b", "ovs_interfaceid": "3207e62f-8b9e-4ed6-b002-3a23c6011a06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2115.182364] env[62813]: DEBUG oslo_concurrency.lockutils [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] Acquired lock "refresh_cache-366200bc-8852-45a3-be8b-016265dbfed1" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.182711] env[62813]: DEBUG nova.network.neutron [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Refreshing network info cache for port 3207e62f-8b9e-4ed6-b002-3a23c6011a06 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2115.183981] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:91:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '489b2441-7132-4942-8b61-49cf0ad4400e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3207e62f-8b9e-4ed6-b002-3a23c6011a06', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2115.191545] env[62813]: DEBUG oslo.service.loopingcall [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2115.192609] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2115.195049] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9842373-a71d-4876-8dce-fa70ea24df10 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.216153] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2115.216153] env[62813]: value = "task-4267784" [ 2115.216153] env[62813]: _type = "Task" [ 2115.216153] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.225727] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267784, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.553317] env[62813]: DEBUG nova.network.neutron [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Updated VIF entry in instance network info cache for port 3207e62f-8b9e-4ed6-b002-3a23c6011a06. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2115.553317] env[62813]: DEBUG nova.network.neutron [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Updating instance_info_cache with network_info: [{"id": "3207e62f-8b9e-4ed6-b002-3a23c6011a06", "address": "fa:16:3e:dc:91:bb", "network": {"id": "0f261672-1246-4a37-8b67-a9d24d314571", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-158054522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d2731f99cdc4553bd301f33c4df1517", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3207e62f-8b", "ovs_interfaceid": "3207e62f-8b9e-4ed6-b002-3a23c6011a06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2115.561955] env[62813]: DEBUG oslo_concurrency.lockutils [req-94a600c5-2c80-4200-831e-d5551e506f8a req-8c4d8f36-604d-4477-b840-8c7b9a4d87ee service nova] Releasing lock "refresh_cache-366200bc-8852-45a3-be8b-016265dbfed1" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.727680] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267784, 'name': CreateVM_Task, 'duration_secs': 0.319526} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.729049] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2115.729049] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.729049] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.729323] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2115.729541] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb9ba9e7-f915-4237-a0c0-89b7084d0cb4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.734781] env[62813]: DEBUG oslo_vmware.api [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for the task: (returnval){ [ 2115.734781] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52001651-5952-9863-ad1d-5bbcb15d0edc" [ 2115.734781] env[62813]: _type = "Task" [ 2115.734781] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.743685] env[62813]: DEBUG oslo_vmware.api [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52001651-5952-9863-ad1d-5bbcb15d0edc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.163843] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2116.164112] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2116.245978] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.246550] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2116.246740] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b29cd0d0-1f32-4b77-8a1a-a5514c214345 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2117.163712] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2117.176675] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2117.177043] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.177158] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.177339] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2117.178539] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462abd5a-ce54-4f2a-8fc0-4a2ef46e02c1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.189260] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a6e420-5f71-4771-b1df-ed17e481df91 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.204836] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338a9a10-1921-4d4d-9eeb-65798d1f552f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.212534] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c992d227-d924-4e0d-ade6-a233c2380612 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.244430] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180749MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2117.244591] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2117.244799] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.327767] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0d095679-87c7-46f6-8869-42b0f22127e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2117.327937] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance f7777e07-72df-4af1-8f22-ccb71db0e06a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2117.328086] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 63766a48-0d55-4261-9949-be3335ae8d0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2117.328222] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2117.328346] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7f344eb3-b1a2-454f-a647-2d9ec7da915f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2117.328468] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance dcc68892-3e75-4da9-975a-5b41c69205f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2117.328591] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0dba8b6e-7927-432c-bd13-f5ce58f0c991 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2117.328708] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 769103f0-9ebd-4a7a-825f-bf7456cb6eb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2117.328825] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 926a846d-f902-4ec3-898e-439f10b4ee68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2117.328940] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 366200bc-8852-45a3-be8b-016265dbfed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
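
Each of the ten instances listed above holds an identical placement allocation (1 VCPU, 128 MB RAM, 1 GB disk), and the totals reported in the next entries follow directly from summing those allocations plus the 512 MB memory reservation that appears in the inventory data. A small worked sketch of that arithmetic, assuming only the per-instance allocations and reserved memory shown in this log:

# Worked example of the resource-view arithmetic visible in this audit:
# ten instances, each allocated 1 VCPU / 128 MB RAM / 1 GB disk, plus the
# 512 MB reported as reserved MEMORY_MB in the inventory entry below.
allocations = [{"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}] * 10
reserved_memory_mb = 512

used_vcpus = sum(a["VCPU"] for a in allocations)                              # 10
used_ram_mb = reserved_memory_mb + sum(a["MEMORY_MB"] for a in allocations)   # 1792
used_disk_gb = sum(a["DISK_GB"] for a in allocations)                         # 10

print(f"used_vcpus={used_vcpus} used_ram={used_ram_mb}MB used_disk={used_disk_gb}GB")
# Matches the "Final resource view" entry: used_ram=1792MB used_disk=10GB
# used_vcpus=10, consistent with used_ram including the reserved memory.
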
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2117.329172] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2117.329335] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2117.468010] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca493c4-0e91-4ac7-b963-e99312d0a5e1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.476447] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f74139-af00-41fa-b9c5-f2de328d9c72 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.506198] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d3e594-f7bc-4331-bd6e-ae77a2752c53 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.514249] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1544f912-9fff-4ed3-a67a-7a421f6daf85 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.527753] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2117.536880] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2117.551826] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2117.552035] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.307s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.549150] env[62813]: DEBUG oslo_service.periodic_task [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2121.164633] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2124.160016] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2131.152234] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Acquiring lock "10ce3bdd-0caa-47ff-bd11-90c038cc6be8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.152596] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Lock "10ce3bdd-0caa-47ff-bd11-90c038cc6be8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2157.518066] env[62813]: WARNING oslo_vmware.rw_handles [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2157.518066] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2157.518066] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2157.518066] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2157.518066] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2157.518066] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 2157.518066] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2157.518066] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2157.518066] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2157.518066] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2157.518066] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2157.518066] env[62813]: ERROR oslo_vmware.rw_handles [ 2157.518066] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 
0d095679-87c7-46f6-8869-42b0f22127e8] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/6cf6d72c-5746-4f24-a642-48d9f5dc0312/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2157.519560] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2157.519817] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Copying Virtual Disk [datastore2] vmware_temp/6cf6d72c-5746-4f24-a642-48d9f5dc0312/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/6cf6d72c-5746-4f24-a642-48d9f5dc0312/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2157.520125] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c43af4ef-a02b-40d9-b6cf-be9bdfb927b2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.529043] env[62813]: DEBUG oslo_vmware.api [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Waiting for the task: (returnval){ [ 2157.529043] env[62813]: value = "task-4267785" [ 2157.529043] env[62813]: _type = "Task" [ 2157.529043] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2157.537699] env[62813]: DEBUG oslo_vmware.api [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Task: {'id': task-4267785, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.039605] env[62813]: DEBUG oslo_vmware.exceptions [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Fault InvalidArgument not matched. 
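
The "Fault InvalidArgument not matched" entry and the VimFaultException reported just below illustrate the general pattern: when a vCenter task fails, the fault name is looked up against a table of specific exception classes and, when no match is found, the error is wrapped in a generic fault exception carrying the fault list. A minimal sketch of that lookup under assumed names; FAULT_MAP, GenericVimFault and TaskInProgressFault are illustrative stand-ins, not the actual oslo.vmware classes.

# Illustrative fault-translation sketch; the class and table names here are
# assumptions made for the example, not the oslo.vmware API.
class GenericVimFault(Exception):
    def __init__(self, message, fault_list):
        super().__init__(message)
        self.fault_list = fault_list


class TaskInProgressFault(Exception):
    pass


# Faults with dedicated handling map to specific exception classes; anything
# else (such as 'InvalidArgument' here) falls through to the generic wrapper.
FAULT_MAP = {"TaskInProgress": TaskInProgressFault}


def translate_fault(fault_name, message):
    exc_class = FAULT_MAP.get(fault_name)
    if exc_class is not None:
        return exc_class(message)
    # The "Fault InvalidArgument not matched." entry corresponds to this branch.
    return GenericVimFault(message, fault_list=[fault_name])


err = translate_fault("InvalidArgument",
                      "A specified parameter was not correct: fileType")
print(type(err).__name__, err, err.fault_list)
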
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2158.039896] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2158.040478] env[62813]: ERROR nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2158.040478] env[62813]: Faults: ['InvalidArgument'] [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Traceback (most recent call last): [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] yield resources [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] self.driver.spawn(context, instance, image_meta, [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] self._fetch_image_if_missing(context, vi) [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] image_cache(vi, tmp_image_ds_loc) [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] vm_util.copy_virtual_disk( [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] session._wait_for_task(vmdk_copy_task) [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] return self.wait_for_task(task_ref) [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] return evt.wait() [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] result = hub.switch() [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] return self.greenlet.switch() [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] self.f(*self.args, **self.kw) [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] raise exceptions.translate_fault(task_info.error) [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Faults: ['InvalidArgument'] [ 2158.040478] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] [ 2158.041483] env[62813]: INFO nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Terminating instance [ 2158.042434] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2158.042681] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2158.042933] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28ec5c75-1d7e-41d6-addd-16b5d9a23311 {{(pid=62813) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.045362] env[62813]: DEBUG nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2158.045565] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2158.046291] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd6e5ca-6fcd-4cf0-bc6f-3b8ce52cd790 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.052882] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2158.053095] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12d1d922-8223-4303-b6ff-9cd2114c6a61 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.055337] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2158.055540] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2158.056488] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbbb588a-2006-4037-a425-54df45d639f4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.061332] env[62813]: DEBUG oslo_vmware.api [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Waiting for the task: (returnval){ [ 2158.061332] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52ae3fa4-5555-783b-8bd3-d062b33965ed" [ 2158.061332] env[62813]: _type = "Task" [ 2158.061332] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.068889] env[62813]: DEBUG oslo_vmware.api [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52ae3fa4-5555-783b-8bd3-d062b33965ed, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.128028] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2158.128279] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2158.128467] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Deleting the datastore file [datastore2] 0d095679-87c7-46f6-8869-42b0f22127e8 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2158.128740] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6cbabdd3-be5b-4fac-81e9-40828a273373 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.136043] env[62813]: DEBUG oslo_vmware.api [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Waiting for the task: (returnval){ [ 2158.136043] env[62813]: value = "task-4267787" [ 2158.136043] env[62813]: _type = "Task" [ 2158.136043] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.145247] env[62813]: DEBUG oslo_vmware.api [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Task: {'id': task-4267787, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.571815] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2158.572204] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Creating directory with path [datastore2] vmware_temp/df7e6cc9-cfaa-4fb5-838e-379be648c7bb/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2158.572444] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8fffcb2-3e19-47b6-b4d7-b86d765a2dee {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.583930] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Created directory with path [datastore2] vmware_temp/df7e6cc9-cfaa-4fb5-838e-379be648c7bb/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2158.584140] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Fetch image to [datastore2] vmware_temp/df7e6cc9-cfaa-4fb5-838e-379be648c7bb/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2158.584315] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/df7e6cc9-cfaa-4fb5-838e-379be648c7bb/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2158.585063] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa39d44-bf05-426d-91cc-3eab8fa1b9b3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.592465] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a4905c-1d6b-48b4-9a94-7c1324fc333c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.603298] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a37985-9752-4149-b091-0741e5d9616c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.635754] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d4243a-ded5-4c9e-b9bb-708223a3e5a8 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.646753] env[62813]: DEBUG oslo_vmware.api [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Task: {'id': task-4267787, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067059} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2158.648322] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2158.648514] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2158.648690] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2158.648871] env[62813]: INFO nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Took 0.60 seconds to destroy the instance on the hypervisor. 
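
Every vCenter operation in this log follows the same rhythm: invoke the call, get back a task handle (task-4267784, task-4267785, task-4267787, ...), then poll it until it reports success or an error, at which point the duration is logged or the fault is raised. A self-contained sketch of that polling loop, assuming a hypothetical get_task_info() accessor and state strings rather than the real oslo.vmware session API.

import time

# Illustrative polling loop in the spirit of the wait_for_task/_poll_task
# entries above; get_task_info() and the state values are assumptions made
# for this sketch, not the actual vSphere/oslo.vmware interface.

class TaskFailed(Exception):
    pass


def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300.0):
    """Poll a task until it finishes; return its duration in seconds."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)   # e.g. {"state": "running", "progress": 40}
        if info["state"] == "success":
            return time.monotonic() - start
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown fault"))
        if time.monotonic() - start > timeout:
            raise TaskFailed(f"{task_id} did not complete within {timeout}s")
        time.sleep(interval)


# Tiny fake backend so the sketch runs standalone: the task "completes" after
# a few polls, mirroring the "progress is 0% ... completed successfully" lines.
_polls = {"n": 0}

def fake_task_info(task_id):
    _polls["n"] += 1
    return {"state": "success" if _polls["n"] >= 3 else "running",
            "progress": min(100, _polls["n"] * 50)}


duration = wait_for_task(fake_task_info, "task-4267787", interval=0.01)
print(f"task-4267787 completed in {duration:.3f}s")

The duration returned by the loop is what shows up in the log as 'duration_secs' once a task such as CreateVM_Task or DeleteDatastoreFile_Task completes successfully.
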
[ 2158.650751] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ed6a2fc9-cffd-4d38-bb08-c3ad70e91e12 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.652773] env[62813]: DEBUG nova.compute.claims [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2158.652953] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.653184] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.688220] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2158.832661] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2158.833474] env[62813]: ERROR nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. 
[ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Traceback (most recent call last): [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] result = getattr(controller, method)(*args, **kwargs) [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self._get(image_id) [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] resp, body = self.http_client.get(url, headers=header) [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self.request(url, 'GET', **kwargs) [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self._handle_response(resp) [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] raise exc.from_response(resp, resp.content) [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] During handling of the above exception, another exception occurred: [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Traceback (most recent call last): [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] yield resources [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] self.driver.spawn(context, instance, image_meta, [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] self._fetch_image_if_missing(context, vi) [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] image_fetch(context, vi, tmp_image_ds_loc) [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] images.fetch_image( [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2158.833474] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] metadata = IMAGE_API.get(context, image_ref) [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return session.show(context, image_id, [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] _reraise_translated_image_exception(image_id) [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] raise new_exc.with_traceback(exc_trace) [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] result = getattr(controller, method)(*args, **kwargs) [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self._get(image_id) [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] resp, body = self.http_client.get(url, headers=header) [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self.request(url, 'GET', **kwargs) [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self._handle_response(resp) [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] raise exc.from_response(resp, resp.content) [ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] nova.exception.ImageNotAuthorized: Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. 
[ 2158.834578] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] [ 2158.834578] env[62813]: INFO nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Terminating instance [ 2158.835763] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2158.835763] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2158.836157] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquiring lock "refresh_cache-f7777e07-72df-4af1-8f22-ccb71db0e06a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2158.836318] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquired lock "refresh_cache-f7777e07-72df-4af1-8f22-ccb71db0e06a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2158.836484] env[62813]: DEBUG nova.network.neutron [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2158.839731] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bccb643a-4e16-4ce8-8d75-40efb55d94e6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.849647] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2158.849839] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2158.851068] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7f2bbda-aea1-4469-9c35-f33bc41ef46f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.860033] env[62813]: DEBUG oslo_vmware.api [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Waiting for the task: (returnval){ [ 2158.860033] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]524b4821-3494-292d-9480-ac3f5505e88b" [ 2158.860033] env[62813]: _type = "Task" [ 2158.860033] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.867785] env[62813]: DEBUG oslo_vmware.api [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]524b4821-3494-292d-9480-ac3f5505e88b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.869656] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df85c84-f009-43bd-bb26-d105fcdb05d5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.872728] env[62813]: DEBUG nova.network.neutron [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2158.879723] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea98a52-4bee-4343-9d24-4589b1f47ed2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.913810] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16cf5e57-e2de-4b23-87b0-e924157a27fe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.922955] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8a59bf-8ffc-40ff-a6d2-b1ec74dedf52 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.937424] env[62813]: DEBUG nova.compute.provider_tree [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2158.946432] env[62813]: DEBUG nova.scheduler.client.report [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2158.961623] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.308s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2158.962213] env[62813]: ERROR nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2158.962213] env[62813]: Faults: ['InvalidArgument'] [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Traceback (most recent call last): [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] self.driver.spawn(context, instance, image_meta, [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", 
line 539, in spawn [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] self._fetch_image_if_missing(context, vi) [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] image_cache(vi, tmp_image_ds_loc) [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] vm_util.copy_virtual_disk( [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] session._wait_for_task(vmdk_copy_task) [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] return self.wait_for_task(task_ref) [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] return evt.wait() [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] result = hub.switch() [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] return self.greenlet.switch() [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] self.f(*self.args, **self.kw) [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] raise exceptions.translate_fault(task_info.error) [ 2158.962213] env[62813]: ERROR 
nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Faults: ['InvalidArgument'] [ 2158.962213] env[62813]: ERROR nova.compute.manager [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] [ 2158.963211] env[62813]: DEBUG nova.compute.utils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2158.964582] env[62813]: DEBUG nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Build of instance 0d095679-87c7-46f6-8869-42b0f22127e8 was re-scheduled: A specified parameter was not correct: fileType [ 2158.964582] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2158.964958] env[62813]: DEBUG nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2158.965159] env[62813]: DEBUG nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2158.965337] env[62813]: DEBUG nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2158.965500] env[62813]: DEBUG nova.network.neutron [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2159.055906] env[62813]: DEBUG nova.network.neutron [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2159.065743] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Releasing lock "refresh_cache-f7777e07-72df-4af1-8f22-ccb71db0e06a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2159.066204] env[62813]: DEBUG nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Start destroying the instance on the hypervisor. 
{{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2159.066406] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2159.067707] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a1980b-09f1-4461-ba34-b2812fe380cf {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.076438] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2159.076674] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-740dc58f-720c-4d48-972e-9d9c60bd1184 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.105169] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2159.105533] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2159.105779] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Deleting the datastore file [datastore2] f7777e07-72df-4af1-8f22-ccb71db0e06a {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2159.106297] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec25e4f1-f10c-437d-b9e7-dd791a7d4d1d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.113566] env[62813]: DEBUG oslo_vmware.api [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Waiting for the task: (returnval){ [ 2159.113566] env[62813]: value = "task-4267789" [ 2159.113566] env[62813]: _type = "Task" [ 2159.113566] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.122432] env[62813]: DEBUG oslo_vmware.api [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Task: {'id': task-4267789, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.370478] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2159.370752] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Creating directory with path [datastore2] vmware_temp/26640e0c-d8a0-47fd-8aad-6e908d9e7beb/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2159.371012] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c46a4afd-1743-47f6-a99e-32f95c32e018 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.383799] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Created directory with path [datastore2] vmware_temp/26640e0c-d8a0-47fd-8aad-6e908d9e7beb/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2159.383935] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Fetch image to [datastore2] vmware_temp/26640e0c-d8a0-47fd-8aad-6e908d9e7beb/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2159.384112] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/26640e0c-d8a0-47fd-8aad-6e908d9e7beb/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2159.388024] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1261fc00-b669-423e-88bd-2c05c36d3e7e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.392518] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d50e21-6e07-44b2-ba59-653828149cea {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.402174] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a81ecb0-120f-499d-ab01-f403420b5913 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.437773] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8002192f-c2d2-4252-acb1-6b277eee92d2 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.445460] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5d366f96-3183-4b3b-8f91-d09ecdf9461f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.471302] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2159.508281] env[62813]: DEBUG nova.network.neutron [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2159.525271] env[62813]: INFO nova.compute.manager [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Took 0.56 seconds to deallocate network for instance. [ 2159.541370] env[62813]: DEBUG oslo_vmware.rw_handles [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/26640e0c-d8a0-47fd-8aad-6e908d9e7beb/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2159.610904] env[62813]: DEBUG oslo_vmware.rw_handles [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2159.611217] env[62813]: DEBUG oslo_vmware.rw_handles [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/26640e0c-d8a0-47fd-8aad-6e908d9e7beb/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2159.626980] env[62813]: DEBUG oslo_vmware.api [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Task: {'id': task-4267789, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043977} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.627602] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2159.627830] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2159.628054] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2159.628247] env[62813]: INFO nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Took 0.56 seconds to destroy the instance on the hypervisor. [ 2159.628506] env[62813]: DEBUG oslo.service.loopingcall [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2159.628727] env[62813]: DEBUG nova.compute.manager [-] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2159.631120] env[62813]: DEBUG nova.compute.claims [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2159.631300] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2159.631540] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.686089] env[62813]: INFO nova.scheduler.client.report [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Deleted allocations for instance 0d095679-87c7-46f6-8869-42b0f22127e8 [ 2159.709871] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d1fec9c5-801e-42a0-9325-4e33623e34aa tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Lock "0d095679-87c7-46f6-8869-42b0f22127e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 663.163s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.711146] env[62813]: DEBUG oslo_concurrency.lockutils [None req-478eb40a-65ac-4951-a38a-541e64ddcd11 tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Lock "0d095679-87c7-46f6-8869-42b0f22127e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 466.575s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.711364] env[62813]: DEBUG oslo_concurrency.lockutils [None req-478eb40a-65ac-4951-a38a-541e64ddcd11 tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Acquiring lock "0d095679-87c7-46f6-8869-42b0f22127e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2159.711573] env[62813]: DEBUG oslo_concurrency.lockutils [None req-478eb40a-65ac-4951-a38a-541e64ddcd11 tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Lock "0d095679-87c7-46f6-8869-42b0f22127e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.711996] env[62813]: DEBUG oslo_concurrency.lockutils [None req-478eb40a-65ac-4951-a38a-541e64ddcd11 tempest-ServersTestFqdnHostnames-617607771 
tempest-ServersTestFqdnHostnames-617607771-project-member] Lock "0d095679-87c7-46f6-8869-42b0f22127e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.714053] env[62813]: INFO nova.compute.manager [None req-478eb40a-65ac-4951-a38a-541e64ddcd11 tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Terminating instance [ 2159.716037] env[62813]: DEBUG nova.compute.manager [None req-478eb40a-65ac-4951-a38a-541e64ddcd11 tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2159.716254] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-478eb40a-65ac-4951-a38a-541e64ddcd11 tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2159.716740] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f94131b-e00c-45c2-b90c-a245bbfdf841 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.727032] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ea1bf0-309c-4986-a6d3-7cd0a3649419 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.742113] env[62813]: DEBUG nova.compute.manager [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2159.766834] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-478eb40a-65ac-4951-a38a-541e64ddcd11 tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0d095679-87c7-46f6-8869-42b0f22127e8 could not be found. [ 2159.767248] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-478eb40a-65ac-4951-a38a-541e64ddcd11 tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2159.767248] env[62813]: INFO nova.compute.manager [None req-478eb40a-65ac-4951-a38a-541e64ddcd11 tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 2159.767550] env[62813]: DEBUG oslo.service.loopingcall [None req-478eb40a-65ac-4951-a38a-541e64ddcd11 tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2159.770603] env[62813]: DEBUG nova.compute.manager [-] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2159.770720] env[62813]: DEBUG nova.network.neutron [-] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2159.799817] env[62813]: DEBUG nova.network.neutron [-] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2159.801869] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2159.812159] env[62813]: INFO nova.compute.manager [-] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] Took 0.04 seconds to deallocate network for instance. [ 2159.851867] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7336777b-7549-4c96-8a6f-37b3827212cb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.863996] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e26695-1e33-437b-b467-c6f38f53fa71 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.897732] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01ce98a-f88b-4284-a1cb-eabfe678edf9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.906532] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad16bd8-3221-404b-b43b-487c25be777f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.921136] env[62813]: DEBUG nova.compute.provider_tree [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2159.929555] env[62813]: DEBUG oslo_concurrency.lockutils [None req-478eb40a-65ac-4951-a38a-541e64ddcd11 tempest-ServersTestFqdnHostnames-617607771 tempest-ServersTestFqdnHostnames-617607771-project-member] Lock "0d095679-87c7-46f6-8869-42b0f22127e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.218s {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.930416] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "0d095679-87c7-46f6-8869-42b0f22127e8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 411.343s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.930619] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0d095679-87c7-46f6-8869-42b0f22127e8] During sync_power_state the instance has a pending task (deleting). Skip. [ 2159.930816] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "0d095679-87c7-46f6-8869-42b0f22127e8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.932085] env[62813]: DEBUG nova.scheduler.client.report [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2159.945781] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.314s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.946517] env[62813]: ERROR nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. 
[ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Traceback (most recent call last): [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] result = getattr(controller, method)(*args, **kwargs) [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self._get(image_id) [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] resp, body = self.http_client.get(url, headers=header) [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self.request(url, 'GET', **kwargs) [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self._handle_response(resp) [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] raise exc.from_response(resp, resp.content) [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] During handling of the above exception, another exception occurred: [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Traceback (most recent call last): [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] self.driver.spawn(context, instance, image_meta, [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] self._fetch_image_if_missing(context, vi) [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] image_fetch(context, vi, tmp_image_ds_loc) [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] images.fetch_image( [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] metadata = IMAGE_API.get(context, image_ref) [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2159.946517] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return session.show(context, image_id, [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] _reraise_translated_image_exception(image_id) [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] raise new_exc.with_traceback(exc_trace) [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: 
f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] result = getattr(controller, method)(*args, **kwargs) [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self._get(image_id) [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] resp, body = self.http_client.get(url, headers=header) [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self.request(url, 'GET', **kwargs) [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self._handle_response(resp) [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] raise exc.from_response(resp, resp.content) [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] nova.exception.ImageNotAuthorized: Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. [ 2159.947458] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] [ 2159.947458] env[62813]: DEBUG nova.compute.utils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. 
{{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2159.948333] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.147s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.949759] env[62813]: INFO nova.compute.claims [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2159.952509] env[62813]: DEBUG nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Build of instance f7777e07-72df-4af1-8f22-ccb71db0e06a was re-scheduled: Not authorized for image f6ee7c32-a26c-4731-80b9-1e546ea30e47. {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2159.953049] env[62813]: DEBUG nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2159.953287] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquiring lock "refresh_cache-f7777e07-72df-4af1-8f22-ccb71db0e06a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2159.953438] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquired lock "refresh_cache-f7777e07-72df-4af1-8f22-ccb71db0e06a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2159.953619] env[62813]: DEBUG nova.network.neutron [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2159.980882] env[62813]: DEBUG nova.network.neutron [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2160.049131] env[62813]: DEBUG nova.network.neutron [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2160.058362] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Releasing lock "refresh_cache-f7777e07-72df-4af1-8f22-ccb71db0e06a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2160.058646] env[62813]: DEBUG nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2160.058842] env[62813]: DEBUG nova.compute.manager [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Skipping network deallocation for instance since networking was not requested. {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2160.128247] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3fca162-c287-45ae-acb6-2d675810fe16 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.140608] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54736b30-3a8a-4dd6-acfe-3472beae7d79 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.172396] env[62813]: INFO nova.scheduler.client.report [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Deleted allocations for instance f7777e07-72df-4af1-8f22-ccb71db0e06a [ 2160.178288] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b839325c-5355-49cb-a45b-62d44474bcee {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.187818] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee56badd-2005-4dd8-878f-ea718d2e2fbc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.194497] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c00e287d-f519-4114-bea2-f79684dab5fd tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Lock "f7777e07-72df-4af1-8f22-ccb71db0e06a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 623.487s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.194761] env[62813]: DEBUG oslo_concurrency.lockutils [None 
req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Lock "f7777e07-72df-4af1-8f22-ccb71db0e06a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 426.823s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2160.194971] env[62813]: DEBUG oslo_concurrency.lockutils [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquiring lock "f7777e07-72df-4af1-8f22-ccb71db0e06a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2160.195191] env[62813]: DEBUG oslo_concurrency.lockutils [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Lock "f7777e07-72df-4af1-8f22-ccb71db0e06a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2160.195362] env[62813]: DEBUG oslo_concurrency.lockutils [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Lock "f7777e07-72df-4af1-8f22-ccb71db0e06a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.205966] env[62813]: DEBUG nova.compute.provider_tree [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2160.207356] env[62813]: INFO nova.compute.manager [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Terminating instance [ 2160.209376] env[62813]: DEBUG oslo_concurrency.lockutils [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquiring lock "refresh_cache-f7777e07-72df-4af1-8f22-ccb71db0e06a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2160.209535] env[62813]: DEBUG oslo_concurrency.lockutils [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Acquired lock "refresh_cache-f7777e07-72df-4af1-8f22-ccb71db0e06a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2160.209709] env[62813]: DEBUG nova.network.neutron [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2160.220234] env[62813]: DEBUG 
nova.scheduler.client.report [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2160.235591] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.287s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.236142] env[62813]: DEBUG nova.compute.manager [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2160.242196] env[62813]: DEBUG nova.network.neutron [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2160.271977] env[62813]: DEBUG nova.compute.utils [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2160.273937] env[62813]: DEBUG nova.compute.manager [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Not allocating networking since 'none' was specified. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 2160.284044] env[62813]: DEBUG nova.compute.manager [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Start building block device mappings for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2160.306736] env[62813]: DEBUG nova.network.neutron [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2160.315405] env[62813]: DEBUG oslo_concurrency.lockutils [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Releasing lock "refresh_cache-f7777e07-72df-4af1-8f22-ccb71db0e06a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2160.315828] env[62813]: DEBUG nova.compute.manager [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2160.316377] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2160.319818] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9cc4a61-45ed-4ad8-b2ac-36cc92b2dc58 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.329744] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb408eaa-99f9-4ee1-9743-a624b3d00035 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.347495] env[62813]: DEBUG nova.compute.manager [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2160.360128] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f7777e07-72df-4af1-8f22-ccb71db0e06a could not be found. [ 2160.360367] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2160.360549] env[62813]: INFO nova.compute.manager [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Took 0.04 seconds to destroy the instance on the hypervisor. 
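[editor's note] The inventory record a few lines above lists totals, reservations and allocation ratios per resource class for provider 49efdf20-78bc-435f-a902-9cc99ed395f2. A quick sketch of how effective capacity is conventionally derived from such a record, assuming the usual Placement formula capacity = (total - reserved) * allocation_ratio; the figures are copied from the log entry:

    # Inventory as reported in the log for provider 49efdf20-78bc-435f-a902-9cc99ed395f2.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 405,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g}")  # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 405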
[ 2160.360809] env[62813]: DEBUG oslo.service.loopingcall [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2160.361081] env[62813]: DEBUG nova.compute.manager [-] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2160.361183] env[62813]: DEBUG nova.network.neutron [-] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2160.373445] env[62813]: DEBUG nova.virt.hardware [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2160.373690] env[62813]: DEBUG nova.virt.hardware [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2160.373851] env[62813]: DEBUG nova.virt.hardware [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2160.374048] env[62813]: DEBUG nova.virt.hardware [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2160.374205] env[62813]: DEBUG nova.virt.hardware [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2160.374354] env[62813]: DEBUG nova.virt.hardware [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2160.374563] 
env[62813]: DEBUG nova.virt.hardware [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2160.375106] env[62813]: DEBUG nova.virt.hardware [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2160.375106] env[62813]: DEBUG nova.virt.hardware [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2160.375106] env[62813]: DEBUG nova.virt.hardware [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2160.375277] env[62813]: DEBUG nova.virt.hardware [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2160.376377] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927146ab-352a-4065-9324-ff17cfaf0896 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.385045] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb455706-6f64-4dcf-a926-f0a5cabee8e2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.398590] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Instance VIF info [] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2160.404184] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Creating folder: Project (7eea11108f07437ebae5238838a88780). Parent ref: group-v840812. 
{{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2160.404495] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8343dfd2-45e8-4513-bb7c-b554d8c35009 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.415832] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Created folder: Project (7eea11108f07437ebae5238838a88780) in parent group-v840812. [ 2160.415832] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Creating folder: Instances. Parent ref: group-v840931. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2160.415832] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38876d55-a3e0-4007-8fc4-ce54fd8c0908 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.427327] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Created folder: Instances in parent group-v840931. [ 2160.427586] env[62813]: DEBUG oslo.service.loopingcall [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2160.427842] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2160.428016] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db71f065-1fb7-4b8a-be00-9b2d89da7707 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.444939] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2160.444939] env[62813]: value = "task-4267792" [ 2160.444939] env[62813]: _type = "Task" [ 2160.444939] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.454016] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267792, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.491598] env[62813]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62813) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2160.491984] env[62813]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-b8f5f9cd-dae8-4eaf-9209-9f7cbc6a88d2'] [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall 
self.network_api.deallocate_for_instance( [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2160.492556] env[62813]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
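[editor's note] The traceback above walks through a wrapper in nova/network/neutron.py that turns a neutronclient Unauthorized (the Keystone 401) into NeutronAdminCredentialConfigurationInvalid, on the reasoning that a 401 on the admin client points at bad [neutron] credentials in nova.conf rather than a tenant-scoped permission problem. A minimal sketch of that guard, with hypothetical names (guard_admin_calls and the exception stand-in) rather than Nova's exact code:

    import functools

    from neutronclient.common import exceptions as neutron_exc


    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for nova.exception.NeutronAdminCredentialConfigurationInvalid."""


    def guard_admin_calls(func, is_admin_client=True):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except neutron_exc.Unauthorized:
                if is_admin_client:
                    # Matches the log: an admin-token 401 means the [neutron]
                    # credentials in nova.conf are invalid, so surface a
                    # configuration error instead of the raw client exception.
                    raise NeutronAdminCredentialConfigurationInvalid()
                raise
        return wrapper

    # Usage sketch: wrapped_list_ports = guard_admin_calls(neutron.list_ports)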
[ 2160.492556] env[62813]: ERROR oslo.service.loopingcall [ 2160.494446] env[62813]: ERROR nova.compute.manager [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2160.526068] env[62813]: ERROR nova.compute.manager [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Traceback (most recent call last): [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] ret = obj(*args, **kwargs) [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] exception_handler_v20(status_code, error_body) [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] raise client_exc(message=error_message, [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Neutron server returns request_ids: ['req-b8f5f9cd-dae8-4eaf-9209-9f7cbc6a88d2'] [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] During handling of the above exception, another exception occurred: [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Traceback (most recent call last): [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] self._delete_instance(context, instance, bdms) [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] self._shutdown_instance(context, instance, bdms) [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] self._try_deallocate_network(context, instance, requested_networks) [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] with excutils.save_and_reraise_exception(): [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] self.force_reraise() [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] raise self.value [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] _deallocate_network_with_retries() [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return evt.wait() [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] result = hub.switch() [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self.greenlet.switch() [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] result = func(*self.args, **self.kw) [ 2160.526068] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] 
result = f(*args, **kwargs) [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] self._deallocate_network( [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] self.network_api.deallocate_for_instance( [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] data = neutron.list_ports(**search_opts) [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] ret = obj(*args, **kwargs) [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self.list('ports', self.ports_path, retrieve_all, [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] ret = obj(*args, **kwargs) [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] for r in self._pagination(collection, path, **params): [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] res = self.get(path, params=params) [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] ret = obj(*args, **kwargs) [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self.retry_request("GET", action, body=body, [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 
2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] ret = obj(*args, **kwargs) [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] return self.do_request(method, action, body=body, [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] ret = obj(*args, **kwargs) [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] self._handle_fault_response(status_code, replybody, resp) [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2160.527273] env[62813]: ERROR nova.compute.manager [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] [ 2160.555889] env[62813]: DEBUG oslo_concurrency.lockutils [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Lock "f7777e07-72df-4af1-8f22-ccb71db0e06a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.361s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.557223] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "f7777e07-72df-4af1-8f22-ccb71db0e06a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 411.969s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2160.557401] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] During sync_power_state the instance has a pending task (deleting). Skip. 
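[editor's note] The lock lines in the surrounding records ("acquired by ... waited 411.969s", "released ... held 0.000s") come from oslo.concurrency's synchronized decorator, which serialises callers on a named lock and logs wait and hold times at DEBUG. A minimal sketch of the same pattern as it is used around the per-instance power-state sync, with a placeholder body:

    from oslo_concurrency import lockutils


    def query_driver_power_state_and_sync(instance_uuid):
        # Callers contending for the same instance UUID queue up on this lock; the
        # decorator emits "acquired by"/"released by" DEBUG lines like those above.
        @lockutils.synchronized(instance_uuid)
        def _sync():
            # e.g. compare the hypervisor power state with the DB record, skipping
            # instances that already have a pending task, as the log shows.
            pass

        _sync()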
[ 2160.557583] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "f7777e07-72df-4af1-8f22-ccb71db0e06a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.629717] env[62813]: INFO nova.compute.manager [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] [instance: f7777e07-72df-4af1-8f22-ccb71db0e06a] Successfully reverted task state from None on failure for instance. [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server [None req-287866bc-25d0-431d-bae2-0483ee4477be tempest-ServerShowV247Test-718164338 tempest-ServerShowV247Test-718164338-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-b8f5f9cd-dae8-4eaf-9209-9f7cbc6a88d2'] [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server return f(*args, 
**kwargs) [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 2160.634258] env[62813]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2160.635847] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2160.637297] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2160.637297] 
env[62813]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2160.637297] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2160.637297] env[62813]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2160.637297] env[62813]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2160.637297] env[62813]: ERROR oslo_messaging.rpc.server [ 2160.957349] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267792, 'name': CreateVM_Task, 'duration_secs': 0.275749} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.957529] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2160.957961] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2160.958143] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2160.958496] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2160.958776] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0916d8f1-dd70-4740-baaf-baae07459ac1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.963766] env[62813]: DEBUG oslo_vmware.api [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Waiting for the task: (returnval){ [ 2160.963766] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52ec271c-9f5a-5be3-0e59-4d8775e40220" [ 2160.963766] env[62813]: _type = "Task" [ 2160.963766] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.971698] env[62813]: DEBUG oslo_vmware.api [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52ec271c-9f5a-5be3-0e59-4d8775e40220, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.476456] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2161.476650] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2161.476862] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1d34f50e-be63-4b6d-bbca-030c6d2088b7 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2167.166324] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2167.166750] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2169.164515] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2169.164798] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2175.164752] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2175.165165] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2175.165165] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2175.186992] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2175.186992] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2175.186992] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2175.186992] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2175.186992] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2175.186992] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2175.187480] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2175.187480] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2175.187480] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2175.187480] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2177.164728] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2177.586467] env[62813]: DEBUG oslo_concurrency.lockutils [None req-2e96bfda-1550-44c8-88f5-01f0e0a4b016 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquiring lock "926a846d-f902-4ec3-898e-439f10b4ee68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.163832] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2179.163931] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2179.175875] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.176132] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.176313] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.176472] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2179.178113] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0639bdc2-62ba-4f05-ac1a-a241764ed8dc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.187120] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba7d5a6-d102-4dc9-9ddb-49c24c223801 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.203413] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7c1dc1-165d-43ef-8511-7d609bcb9808 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.210676] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439d8513-2a16-4a1d-acac-55a15f5273d5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.243354] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180773MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2179.243516] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.243723] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.343645] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 63766a48-0d55-4261-9949-be3335ae8d0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2179.343824] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2179.343985] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7f344eb3-b1a2-454f-a647-2d9ec7da915f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2179.344133] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance dcc68892-3e75-4da9-975a-5b41c69205f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2179.344260] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0dba8b6e-7927-432c-bd13-f5ce58f0c991 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2179.344409] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 769103f0-9ebd-4a7a-825f-bf7456cb6eb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2179.344533] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 926a846d-f902-4ec3-898e-439f10b4ee68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2179.344663] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 366200bc-8852-45a3-be8b-016265dbfed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2179.344784] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 10ce3bdd-0caa-47ff-bd11-90c038cc6be8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2179.344986] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2179.345155] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=225GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2179.470631] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdf0b96-35ca-46f8-b4b5-2444bd813c4e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.478894] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11aaf800-4e47-497f-8158-6db060a45799 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.509130] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5abbaaf-cdcf-4900-a9c9-7bb8b4220f3e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.517315] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9aa02e-0db1-4d0f-af9b-72ae5129930d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.534219] env[62813]: DEBUG nova.compute.provider_tree 
[None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2179.544824] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2179.561058] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2179.561280] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.318s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.199051] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "364c3489-27d4-47c9-a447-7ca4af197f67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.199406] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Lock "364c3489-27d4-47c9-a447-7ca4af197f67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.210515] env[62813]: DEBUG nova.compute.manager [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2181.262512] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.262833] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.264803] env[62813]: INFO nova.compute.claims [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2181.442594] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a0f2fe-6392-42ef-b749-90a22d43cb0d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.450928] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ed775d-45ed-407e-80ad-50009a9c74bb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.481388] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f704a504-f489-4da4-9e3d-47242120f8cf {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.489062] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a137c1f6-7352-4b28-b37d-9fc566a5bdf9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.502275] env[62813]: DEBUG nova.compute.provider_tree [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2181.510998] env[62813]: DEBUG nova.scheduler.client.report [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2181.526017] env[62813]: DEBUG oslo_concurrency.lockutils [None 
req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.263s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.526506] env[62813]: DEBUG nova.compute.manager [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2181.556535] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2181.561411] env[62813]: DEBUG nova.compute.utils [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2181.562681] env[62813]: DEBUG nova.compute.manager [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2181.562906] env[62813]: DEBUG nova.network.neutron [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2181.573063] env[62813]: DEBUG nova.compute.manager [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2181.620617] env[62813]: DEBUG nova.policy [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7e83a1e9938040319abff86403da1abd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed4f82f29464418095009edeaaabf851', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 2181.641196] env[62813]: DEBUG nova.compute.manager [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2181.666131] env[62813]: DEBUG nova.virt.hardware [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2181.666385] env[62813]: DEBUG nova.virt.hardware [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2181.666546] env[62813]: DEBUG nova.virt.hardware [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2181.666828] env[62813]: DEBUG nova.virt.hardware [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2181.666981] env[62813]: DEBUG nova.virt.hardware [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2181.667155] env[62813]: DEBUG nova.virt.hardware [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2181.667374] env[62813]: DEBUG nova.virt.hardware [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2181.667540] env[62813]: DEBUG nova.virt.hardware [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2181.667715] env[62813]: DEBUG nova.virt.hardware [None 
req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2181.667879] env[62813]: DEBUG nova.virt.hardware [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2181.668075] env[62813]: DEBUG nova.virt.hardware [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2181.668951] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05e2a1f-b9c3-48cc-bfce-34d0c5e13a11 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.677661] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c16f976-b657-4f19-b0b2-ecd1a3b872c4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.010809] env[62813]: DEBUG nova.network.neutron [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Successfully created port: 4e8438b4-b171-48cf-88cc-8adb44d9e665 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2182.163670] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2182.628603] env[62813]: DEBUG nova.compute.manager [req-1023976d-7960-4d4c-bf06-ca6c935a35ee req-58e3a9c4-538c-47b8-b308-ddbff367f738 service nova] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Received event network-vif-plugged-4e8438b4-b171-48cf-88cc-8adb44d9e665 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2182.628886] env[62813]: DEBUG oslo_concurrency.lockutils [req-1023976d-7960-4d4c-bf06-ca6c935a35ee req-58e3a9c4-538c-47b8-b308-ddbff367f738 service nova] Acquiring lock "364c3489-27d4-47c9-a447-7ca4af197f67-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2182.629024] env[62813]: DEBUG oslo_concurrency.lockutils [req-1023976d-7960-4d4c-bf06-ca6c935a35ee req-58e3a9c4-538c-47b8-b308-ddbff367f738 service nova] Lock "364c3489-27d4-47c9-a447-7ca4af197f67-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2182.629195] env[62813]: DEBUG oslo_concurrency.lockutils [req-1023976d-7960-4d4c-bf06-ca6c935a35ee req-58e3a9c4-538c-47b8-b308-ddbff367f738 service nova] Lock 
"364c3489-27d4-47c9-a447-7ca4af197f67-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2182.629361] env[62813]: DEBUG nova.compute.manager [req-1023976d-7960-4d4c-bf06-ca6c935a35ee req-58e3a9c4-538c-47b8-b308-ddbff367f738 service nova] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] No waiting events found dispatching network-vif-plugged-4e8438b4-b171-48cf-88cc-8adb44d9e665 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2182.629532] env[62813]: WARNING nova.compute.manager [req-1023976d-7960-4d4c-bf06-ca6c935a35ee req-58e3a9c4-538c-47b8-b308-ddbff367f738 service nova] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Received unexpected event network-vif-plugged-4e8438b4-b171-48cf-88cc-8adb44d9e665 for instance with vm_state building and task_state spawning. [ 2182.714906] env[62813]: DEBUG nova.network.neutron [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Successfully updated port: 4e8438b4-b171-48cf-88cc-8adb44d9e665 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2182.728375] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "refresh_cache-364c3489-27d4-47c9-a447-7ca4af197f67" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2182.728582] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquired lock "refresh_cache-364c3489-27d4-47c9-a447-7ca4af197f67" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2182.728777] env[62813]: DEBUG nova.network.neutron [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2182.780255] env[62813]: DEBUG nova.network.neutron [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2182.975294] env[62813]: DEBUG nova.network.neutron [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Updating instance_info_cache with network_info: [{"id": "4e8438b4-b171-48cf-88cc-8adb44d9e665", "address": "fa:16:3e:b2:48:46", "network": {"id": "3634497e-4629-49a7-8257-310f15553ab0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-918616078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed4f82f29464418095009edeaaabf851", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8438b4-b1", "ovs_interfaceid": "4e8438b4-b171-48cf-88cc-8adb44d9e665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2182.989101] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Releasing lock "refresh_cache-364c3489-27d4-47c9-a447-7ca4af197f67" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2182.989419] env[62813]: DEBUG nova.compute.manager [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Instance network_info: |[{"id": "4e8438b4-b171-48cf-88cc-8adb44d9e665", "address": "fa:16:3e:b2:48:46", "network": {"id": "3634497e-4629-49a7-8257-310f15553ab0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-918616078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed4f82f29464418095009edeaaabf851", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8438b4-b1", "ovs_interfaceid": "4e8438b4-b171-48cf-88cc-8adb44d9e665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2182.989838] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:48:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b2049d7-f99e-425a-afdb-2c95ca88e483', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e8438b4-b171-48cf-88cc-8adb44d9e665', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2182.997425] env[62813]: DEBUG oslo.service.loopingcall [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2182.997915] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2182.998233] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72d041cb-700f-4d63-a122-f4680f17dc98 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.019580] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2183.019580] env[62813]: value = "task-4267793" [ 2183.019580] env[62813]: _type = "Task" [ 2183.019580] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.028848] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267793, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.530888] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267793, 'name': CreateVM_Task} progress is 99%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.030461] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267793, 'name': CreateVM_Task} progress is 99%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.530958] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267793, 'name': CreateVM_Task, 'duration_secs': 1.341072} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.531159] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2184.531896] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2184.532053] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2184.532403] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2184.532664] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea1740c6-5a3c-4ffa-96c8-e6ef26ef7edc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.537425] env[62813]: DEBUG oslo_vmware.api [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Waiting for the task: (returnval){ [ 2184.537425] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]5236a44e-853c-e1a1-65a1-b5f1aac2c79d" [ 2184.537425] env[62813]: _type = "Task" [ 2184.537425] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.545285] env[62813]: DEBUG oslo_vmware.api [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]5236a44e-853c-e1a1-65a1-b5f1aac2c79d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.659256] env[62813]: DEBUG nova.compute.manager [req-3fc95197-33f3-4a6a-8c6b-199f57752ee5 req-7eb27352-4bcb-4201-8a36-65ddf726020a service nova] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Received event network-changed-4e8438b4-b171-48cf-88cc-8adb44d9e665 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2184.659497] env[62813]: DEBUG nova.compute.manager [req-3fc95197-33f3-4a6a-8c6b-199f57752ee5 req-7eb27352-4bcb-4201-8a36-65ddf726020a service nova] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Refreshing instance network info cache due to event network-changed-4e8438b4-b171-48cf-88cc-8adb44d9e665. 
{{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2184.659779] env[62813]: DEBUG oslo_concurrency.lockutils [req-3fc95197-33f3-4a6a-8c6b-199f57752ee5 req-7eb27352-4bcb-4201-8a36-65ddf726020a service nova] Acquiring lock "refresh_cache-364c3489-27d4-47c9-a447-7ca4af197f67" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2184.660036] env[62813]: DEBUG oslo_concurrency.lockutils [req-3fc95197-33f3-4a6a-8c6b-199f57752ee5 req-7eb27352-4bcb-4201-8a36-65ddf726020a service nova] Acquired lock "refresh_cache-364c3489-27d4-47c9-a447-7ca4af197f67" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2184.660178] env[62813]: DEBUG nova.network.neutron [req-3fc95197-33f3-4a6a-8c6b-199f57752ee5 req-7eb27352-4bcb-4201-8a36-65ddf726020a service nova] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Refreshing network info cache for port 4e8438b4-b171-48cf-88cc-8adb44d9e665 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2184.919819] env[62813]: DEBUG nova.network.neutron [req-3fc95197-33f3-4a6a-8c6b-199f57752ee5 req-7eb27352-4bcb-4201-8a36-65ddf726020a service nova] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Updated VIF entry in instance network info cache for port 4e8438b4-b171-48cf-88cc-8adb44d9e665. {{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2184.920375] env[62813]: DEBUG nova.network.neutron [req-3fc95197-33f3-4a6a-8c6b-199f57752ee5 req-7eb27352-4bcb-4201-8a36-65ddf726020a service nova] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Updating instance_info_cache with network_info: [{"id": "4e8438b4-b171-48cf-88cc-8adb44d9e665", "address": "fa:16:3e:b2:48:46", "network": {"id": "3634497e-4629-49a7-8257-310f15553ab0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-918616078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed4f82f29464418095009edeaaabf851", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b2049d7-f99e-425a-afdb-2c95ca88e483", "external-id": "nsx-vlan-transportzone-803", "segmentation_id": 803, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8438b4-b1", "ovs_interfaceid": "4e8438b4-b171-48cf-88cc-8adb44d9e665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2184.933496] env[62813]: DEBUG oslo_concurrency.lockutils [req-3fc95197-33f3-4a6a-8c6b-199f57752ee5 req-7eb27352-4bcb-4201-8a36-65ddf726020a service nova] Releasing lock "refresh_cache-364c3489-27d4-47c9-a447-7ca4af197f67" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2185.049319] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Releasing 
lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2185.049713] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2185.049713] env[62813]: DEBUG oslo_concurrency.lockutils [None req-69b4d9ff-ab8a-4acc-a5db-3103cd8f13c5 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2205.014567] env[62813]: WARNING oslo_vmware.rw_handles [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2205.014567] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2205.014567] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2205.014567] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2205.014567] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2205.014567] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 2205.014567] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2205.014567] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2205.014567] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2205.014567] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2205.014567] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2205.014567] env[62813]: ERROR oslo_vmware.rw_handles [ 2205.015348] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/26640e0c-d8a0-47fd-8aad-6e908d9e7beb/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2205.017029] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2205.017334] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 
tempest-ServerShowV257Test-1678213244-project-member] Copying Virtual Disk [datastore2] vmware_temp/26640e0c-d8a0-47fd-8aad-6e908d9e7beb/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/26640e0c-d8a0-47fd-8aad-6e908d9e7beb/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2205.017649] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82c7ecbd-65b5-40d6-ba29-3d0dbd42fa19 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.026185] env[62813]: DEBUG oslo_vmware.api [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Waiting for the task: (returnval){ [ 2205.026185] env[62813]: value = "task-4267794" [ 2205.026185] env[62813]: _type = "Task" [ 2205.026185] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.034721] env[62813]: DEBUG oslo_vmware.api [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Task: {'id': task-4267794, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.537550] env[62813]: DEBUG oslo_vmware.exceptions [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2205.537837] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2205.538414] env[62813]: ERROR nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2205.538414] env[62813]: Faults: ['InvalidArgument'] [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Traceback (most recent call last): [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] yield resources [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self.driver.spawn(context, instance, image_meta, [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 
63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self._fetch_image_if_missing(context, vi) [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] image_cache(vi, tmp_image_ds_loc) [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] vm_util.copy_virtual_disk( [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] session._wait_for_task(vmdk_copy_task) [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] return self.wait_for_task(task_ref) [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] return evt.wait() [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] result = hub.switch() [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] return self.greenlet.switch() [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self.f(*self.args, **self.kw) [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] raise 
exceptions.translate_fault(task_info.error) [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Faults: ['InvalidArgument'] [ 2205.538414] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] [ 2205.539287] env[62813]: INFO nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Terminating instance [ 2205.542061] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2205.542061] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2205.542061] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquiring lock "refresh_cache-63766a48-0d55-4261-9949-be3335ae8d0a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2205.542061] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquired lock "refresh_cache-63766a48-0d55-4261-9949-be3335ae8d0a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2205.542281] env[62813]: DEBUG nova.network.neutron [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2205.543254] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-adab23e8-c070-447e-a266-1bc43c070096 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.554366] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2205.554578] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2205.555616] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e94f58d-1b94-4578-aacc-da3194abb12f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.561441] env[62813]: DEBUG oslo_vmware.api [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for the task: (returnval){ [ 2205.561441] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]529eea98-a3f6-3fbb-7c0f-84e394e3463e" [ 2205.561441] env[62813]: _type = "Task" [ 2205.561441] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.570379] env[62813]: DEBUG oslo_vmware.api [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]529eea98-a3f6-3fbb-7c0f-84e394e3463e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.573201] env[62813]: DEBUG nova.network.neutron [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2205.640190] env[62813]: DEBUG nova.network.neutron [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2205.651421] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Releasing lock "refresh_cache-63766a48-0d55-4261-9949-be3335ae8d0a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2205.651840] env[62813]: DEBUG nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Start destroying the instance on the hypervisor. 
{{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2205.652061] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2205.653282] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7f9389-0531-4001-acf0-9f47b7e09985 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.662146] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2205.662395] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-915c0308-986b-40e1-8a19-d11761a72196 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.701673] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2205.701899] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2205.701997] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Deleting the datastore file [datastore2] 63766a48-0d55-4261-9949-be3335ae8d0a {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2205.702295] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c11aa911-d0af-408e-989a-9174c38241cc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.708776] env[62813]: DEBUG oslo_vmware.api [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Waiting for the task: (returnval){ [ 2205.708776] env[62813]: value = "task-4267796" [ 2205.708776] env[62813]: _type = "Task" [ 2205.708776] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.716502] env[62813]: DEBUG oslo_vmware.api [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Task: {'id': task-4267796, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.072502] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2206.072867] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Creating directory with path [datastore2] vmware_temp/db2f1779-ed2b-4401-8fa1-255c86fa8c30/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2206.072995] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68d71754-3d65-4205-b40d-7f81b6612603 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.085570] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Created directory with path [datastore2] vmware_temp/db2f1779-ed2b-4401-8fa1-255c86fa8c30/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2206.085814] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Fetch image to [datastore2] vmware_temp/db2f1779-ed2b-4401-8fa1-255c86fa8c30/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2206.085950] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/db2f1779-ed2b-4401-8fa1-255c86fa8c30/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2206.086724] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc87da0f-55ec-47be-9f71-0564f0c5c4e7 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.093856] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a060b081-07ef-421f-9428-51797249b819 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.103217] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c540950-4e76-4ef3-a87d-68cbc401c840 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.135347] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34141762-fe0f-4d0d-b140-61677d1ec9bd 
{{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.141527] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1c08761e-0afe-4d51-8e41-5eeb0258f0ce {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.164638] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2206.218667] env[62813]: DEBUG oslo_vmware.api [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Task: {'id': task-4267796, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.038953} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.219771] env[62813]: DEBUG oslo_vmware.rw_handles [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/db2f1779-ed2b-4401-8fa1-255c86fa8c30/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2206.221625] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2206.221850] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2206.222053] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2206.222246] env[62813]: INFO nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Took 0.57 seconds to destroy the instance on the hypervisor. [ 2206.222520] env[62813]: DEBUG oslo.service.loopingcall [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2206.223584] env[62813]: DEBUG nova.compute.manager [-] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Skipping network deallocation for instance since networking was not requested. {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2206.275757] env[62813]: DEBUG nova.compute.claims [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2206.275950] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2206.276200] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2206.280830] env[62813]: DEBUG oslo_vmware.rw_handles [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2206.281016] env[62813]: DEBUG oslo_vmware.rw_handles [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/db2f1779-ed2b-4401-8fa1-255c86fa8c30/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2206.453518] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08ab3e5-3363-4ace-b104-a105ebd4d23b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.461493] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e452c62-74d6-4711-b1eb-a7cf87d2d9ba {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.492407] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403f7e03-c6fb-4633-946f-1893fd74c341 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.500489] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d71bdfd-7677-4826-8217-a5b740e47341 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.514663] env[62813]: DEBUG nova.compute.provider_tree [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2206.524897] env[62813]: DEBUG nova.scheduler.client.report [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2206.540725] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.264s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2206.541358] env[62813]: ERROR nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2206.541358] env[62813]: Faults: ['InvalidArgument'] [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Traceback (most recent call last): [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2206.541358] env[62813]: ERROR nova.compute.manager 
[instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self.driver.spawn(context, instance, image_meta, [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self._fetch_image_if_missing(context, vi) [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] image_cache(vi, tmp_image_ds_loc) [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] vm_util.copy_virtual_disk( [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] session._wait_for_task(vmdk_copy_task) [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] return self.wait_for_task(task_ref) [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] return evt.wait() [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] result = hub.switch() [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] return self.greenlet.switch() [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self.f(*self.args, **self.kw) [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] raise exceptions.translate_fault(task_info.error) [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Faults: ['InvalidArgument'] [ 2206.541358] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] [ 2206.542212] env[62813]: DEBUG nova.compute.utils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2206.543695] env[62813]: DEBUG nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Build of instance 63766a48-0d55-4261-9949-be3335ae8d0a was re-scheduled: A specified parameter was not correct: fileType [ 2206.543695] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2206.544194] env[62813]: DEBUG nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2206.544477] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquiring lock "refresh_cache-63766a48-0d55-4261-9949-be3335ae8d0a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.544712] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquired lock "refresh_cache-63766a48-0d55-4261-9949-be3335ae8d0a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.544978] env[62813]: DEBUG nova.network.neutron [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2206.571148] env[62813]: DEBUG nova.network.neutron [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2206.678534] env[62813]: DEBUG nova.network.neutron [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2206.689642] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Releasing lock "refresh_cache-63766a48-0d55-4261-9949-be3335ae8d0a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2206.689887] env[62813]: DEBUG nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2206.690132] env[62813]: DEBUG nova.compute.manager [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Skipping network deallocation for instance since networking was not requested. {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2206.789065] env[62813]: INFO nova.scheduler.client.report [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Deleted allocations for instance 63766a48-0d55-4261-9949-be3335ae8d0a [ 2206.811481] env[62813]: DEBUG oslo_concurrency.lockutils [None req-28a4a1d6-b61c-4dfc-ad28-861d903a83cf tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Lock "63766a48-0d55-4261-9949-be3335ae8d0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 621.958s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2206.811732] env[62813]: DEBUG oslo_concurrency.lockutils [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Lock "63766a48-0d55-4261-9949-be3335ae8d0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 426.081s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2206.811973] env[62813]: DEBUG oslo_concurrency.lockutils [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquiring lock "63766a48-0d55-4261-9949-be3335ae8d0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2206.812197] env[62813]: DEBUG oslo_concurrency.lockutils [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Lock "63766a48-0d55-4261-9949-be3335ae8d0a-events" acquired 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2206.812367] env[62813]: DEBUG oslo_concurrency.lockutils [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Lock "63766a48-0d55-4261-9949-be3335ae8d0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2206.814486] env[62813]: INFO nova.compute.manager [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Terminating instance [ 2206.816193] env[62813]: DEBUG oslo_concurrency.lockutils [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquiring lock "refresh_cache-63766a48-0d55-4261-9949-be3335ae8d0a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.816351] env[62813]: DEBUG oslo_concurrency.lockutils [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Acquired lock "refresh_cache-63766a48-0d55-4261-9949-be3335ae8d0a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.816519] env[62813]: DEBUG nova.network.neutron [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2206.844717] env[62813]: DEBUG nova.network.neutron [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Instance cache missing network info. {{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2206.943670] env[62813]: DEBUG nova.network.neutron [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2206.953976] env[62813]: DEBUG oslo_concurrency.lockutils [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Releasing lock "refresh_cache-63766a48-0d55-4261-9949-be3335ae8d0a" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2206.954548] env[62813]: DEBUG nova.compute.manager [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Start destroying the instance on the hypervisor. 
{{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2206.954769] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2206.955378] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cb1e77f7-1b8e-4b2d-ac44-856f6b568cd1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.966492] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdcd7b28-f2db-4804-9000-6ab15be8d058 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.997695] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 63766a48-0d55-4261-9949-be3335ae8d0a could not be found. [ 2206.997918] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2206.998126] env[62813]: INFO nova.compute.manager [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2206.998383] env[62813]: DEBUG oslo.service.loopingcall [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2206.998625] env[62813]: DEBUG nova.compute.manager [-] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2206.998722] env[62813]: DEBUG nova.network.neutron [-] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2207.113018] env[62813]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62813) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2207.113018] env[62813]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-6b3a66ce-f03b-4c10-98e8-49529814b889'] [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2207.113682] env[62813]: ERROR oslo.service.loopingcall [ 2207.115393] env[62813]: ERROR nova.compute.manager [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2207.149552] env[62813]: ERROR nova.compute.manager [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Traceback (most recent call last): [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] ret = obj(*args, **kwargs) [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] exception_handler_v20(status_code, error_body) [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] raise client_exc(message=error_message, [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Neutron server returns request_ids: ['req-6b3a66ce-f03b-4c10-98e8-49529814b889'] [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] During handling of the above exception, another exception occurred: [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Traceback (most recent call last): [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File 
"/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self._delete_instance(context, instance, bdms) [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self._shutdown_instance(context, instance, bdms) [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self._try_deallocate_network(context, instance, requested_networks) [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] with excutils.save_and_reraise_exception(): [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self.force_reraise() [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] raise self.value [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] _deallocate_network_with_retries() [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] return evt.wait() [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] result = hub.switch() [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] return self.greenlet.switch() [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] result = 
func(*self.args, **self.kw) [ 2207.149552] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] result = f(*args, **kwargs) [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self._deallocate_network( [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self.network_api.deallocate_for_instance( [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] data = neutron.list_ports(**search_opts) [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] ret = obj(*args, **kwargs) [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] return self.list('ports', self.ports_path, retrieve_all, [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] ret = obj(*args, **kwargs) [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] for r in self._pagination(collection, path, **params): [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] res = self.get(path, params=params) [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] ret = obj(*args, **kwargs) [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] return self.retry_request("GET", action, body=body, [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] ret = obj(*args, **kwargs) [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] return self.do_request(method, action, body=body, [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] ret = obj(*args, **kwargs) [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] self._handle_fault_response(status_code, replybody, resp) [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2207.150742] env[62813]: ERROR nova.compute.manager [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] [ 2207.178660] env[62813]: DEBUG oslo_concurrency.lockutils [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Lock "63766a48-0d55-4261-9949-be3335ae8d0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.367s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2207.229376] env[62813]: INFO nova.compute.manager [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] [instance: 63766a48-0d55-4261-9949-be3335ae8d0a] Successfully reverted task state from None on failure for instance. [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server [None req-132bd4b9-e983-453e-8b38-529a309a4c60 tempest-ServerShowV257Test-1678213244 tempest-ServerShowV257Test-1678213244-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
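The traceback above shows the failure shape clearly: the neutronclient list_ports call comes back with HTTP 401 Unauthorized, and the wrapper at nova/network/neutron.py line 212 converts that into nova.exception.NeutronAdminCredentialConfigurationInvalid, which is what _try_deallocate_network ultimately reraises (the same exception is logged once more below by the oslo_messaging RPC server layer). The snippet below is a minimal, self-contained sketch of that translation pattern only; every name in it (Unauthorized, NeutronAdminCredentialConfigurationInvalid, wrap_client_call, list_ports) is an illustrative stand-in, not the real Nova or neutronclient code.

```python
# Minimal sketch of the exception-translation pattern visible in the traceback:
# every Neutron client call goes through a wrapper, and a 401 from Neutron is
# re-raised as a Nova-level "admin credentials invalid" error.
# All names below are illustrative stand-ins.
import functools


class Unauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""


class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for nova.exception.NeutronAdminCredentialConfigurationInvalid."""


def wrap_client_call(func):
    """Translate a Neutron 401 into a configuration error, as the wrapper in the log does."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Unauthorized:
            # The service credentials used to talk to Neutron were rejected, so
            # retrying the same request with the same credentials cannot succeed.
            raise NeutronAdminCredentialConfigurationInvalid()
    return wrapper


@wrap_client_call
def list_ports(**search_opts):
    # Simulate the failing call from the traceback: neutron.list_ports(**search_opts).
    raise Unauthorized("401: The request you have made requires authentication.")


if __name__ == "__main__":
    try:
        list_ports(device_id="63766a48-0d55-4261-9949-be3335ae8d0a")
    except NeutronAdminCredentialConfigurationInvalid as exc:
        print(type(exc).__name__)
```

Translating the 401 into a dedicated configuration exception means the operator-facing error says "networking credentials are misconfigured" instead of surfacing a generic networking failure.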
[ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-6b3a66ce-f03b-4c10-98e8-49529814b889'] [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 2207.232831] env[62813]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server raise self.value [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.234296] env[62813]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2207.234296] env[62813]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2207.235691] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2207.235691] env[62813]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2207.235691] env[62813]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2207.235691] env[62813]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2207.235691] env[62813]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
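This second traceback, logged by the RPC server, points at the same root cause as the first: the Keystone credentials Nova uses for its Neutron client (normally configured in the [neutron] section of nova.conf) are being rejected with a 401, so the instance delete fails and the message handler reraises. One way to narrow it down, independent of Nova, is to exercise those credentials directly with keystoneauth1; this is a hedged diagnostic sketch, and the auth_url, user, project, and domain values are placeholders rather than values taken from this deployment.

```python
# Hedged diagnostic sketch: request a token straight from Keystone with the
# service credentials Nova would use for Neutron.
# All literal values are placeholders; substitute the real values from the
# [neutron] section of nova.conf for the deployment being debugged.
from keystoneauth1 import session
from keystoneauth1.identity import v3

auth = v3.Password(
    auth_url="http://keystone.example:5000/v3",  # placeholder Keystone endpoint
    username="nova",                             # placeholder service user
    password="REDACTED",                         # placeholder
    project_name="service",                      # placeholder service project
    user_domain_name="Default",
    project_domain_name="Default",
)

sess = session.Session(auth=auth)

# If the credentials are wrong, this raises an HTTP 401 error from keystoneauth1,
# mirroring the Unauthorized that the neutronclient call hit in the traceback above.
print("Got token:", sess.get_token()[:16], "...")
```

If the token request succeeds here but Nova still logs 401s, the problem is more likely a stale or revoked token being reused, or a project/domain mismatch, than the password itself.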
[ 2207.235691] env[62813]: ERROR oslo_messaging.rpc.server [ 2214.310501] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "e7283591-30ac-4132-9b7f-d407a82e9b87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.310836] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "e7283591-30ac-4132-9b7f-d407a82e9b87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.322158] env[62813]: DEBUG nova.compute.manager [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Starting instance... {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2214.372131] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.372389] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.373936] env[62813]: INFO nova.compute.claims [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2214.563029] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465fab4d-10e2-4fef-95d9-9f398eaefa25 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.571973] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a09d47-b24c-4b8d-a228-73940ce5e497 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.603189] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03b29d5-8367-40e4-a202-e31e32fc96a1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.611266] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6c034c-03d0-4f94-b0c4-adc0ec8f11eb {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.626431] env[62813]: 
DEBUG nova.compute.provider_tree [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2214.635676] env[62813]: DEBUG nova.scheduler.client.report [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2214.649655] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.277s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2214.650193] env[62813]: DEBUG nova.compute.manager [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2214.690263] env[62813]: DEBUG nova.compute.utils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2214.691588] env[62813]: DEBUG nova.compute.manager [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2214.691763] env[62813]: DEBUG nova.network.neutron [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2214.725255] env[62813]: DEBUG nova.compute.manager [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Start building block device mappings for instance. 
{{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2214.806497] env[62813]: DEBUG nova.policy [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05e5639b4c294098ac97eae52872b91c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dab4ddba893f4b47886bb54e9083c414', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 2214.809338] env[62813]: DEBUG nova.compute.manager [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Start spawning the instance on the hypervisor. {{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2214.835318] env[62813]: DEBUG nova.virt.hardware [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2214.835587] env[62813]: DEBUG nova.virt.hardware [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2214.835756] env[62813]: DEBUG nova.virt.hardware [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2214.835941] env[62813]: DEBUG nova.virt.hardware [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2214.836871] env[62813]: DEBUG nova.virt.hardware [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2214.837114] env[62813]: DEBUG nova.virt.hardware [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 
tempest-ServersTestJSON-661015703-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2214.837351] env[62813]: DEBUG nova.virt.hardware [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2214.837522] env[62813]: DEBUG nova.virt.hardware [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2214.837700] env[62813]: DEBUG nova.virt.hardware [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2214.837869] env[62813]: DEBUG nova.virt.hardware [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2214.838060] env[62813]: DEBUG nova.virt.hardware [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2214.838948] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b21406-94b6-425a-adee-373205f3728b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.850024] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37cf6af2-b117-4dd7-945a-c8d0f4da91c0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.184543] env[62813]: DEBUG nova.network.neutron [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Successfully created port: 15a500ab-07b5-49d7-a387-10a7230dfe27 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2216.018888] env[62813]: DEBUG nova.compute.manager [req-3f8e306b-d424-4579-abf2-415b5336b469 req-62bc9644-cf57-47d6-99b4-ac640ae1f14a service nova] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Received event network-vif-plugged-15a500ab-07b5-49d7-a387-10a7230dfe27 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2216.019168] env[62813]: DEBUG oslo_concurrency.lockutils [req-3f8e306b-d424-4579-abf2-415b5336b469 req-62bc9644-cf57-47d6-99b4-ac640ae1f14a service nova] Acquiring lock "e7283591-30ac-4132-9b7f-d407a82e9b87-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2216.019393] env[62813]: DEBUG oslo_concurrency.lockutils [req-3f8e306b-d424-4579-abf2-415b5336b469 req-62bc9644-cf57-47d6-99b4-ac640ae1f14a service nova] Lock "e7283591-30ac-4132-9b7f-d407a82e9b87-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2216.019571] env[62813]: DEBUG oslo_concurrency.lockutils [req-3f8e306b-d424-4579-abf2-415b5336b469 req-62bc9644-cf57-47d6-99b4-ac640ae1f14a service nova] Lock "e7283591-30ac-4132-9b7f-d407a82e9b87-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2216.019744] env[62813]: DEBUG nova.compute.manager [req-3f8e306b-d424-4579-abf2-415b5336b469 req-62bc9644-cf57-47d6-99b4-ac640ae1f14a service nova] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] No waiting events found dispatching network-vif-plugged-15a500ab-07b5-49d7-a387-10a7230dfe27 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2216.019917] env[62813]: WARNING nova.compute.manager [req-3f8e306b-d424-4579-abf2-415b5336b469 req-62bc9644-cf57-47d6-99b4-ac640ae1f14a service nova] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Received unexpected event network-vif-plugged-15a500ab-07b5-49d7-a387-10a7230dfe27 for instance with vm_state building and task_state spawning. [ 2216.112127] env[62813]: DEBUG nova.network.neutron [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Successfully updated port: 15a500ab-07b5-49d7-a387-10a7230dfe27 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2216.124350] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "refresh_cache-e7283591-30ac-4132-9b7f-d407a82e9b87" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2216.124530] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired lock "refresh_cache-e7283591-30ac-4132-9b7f-d407a82e9b87" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2216.124693] env[62813]: DEBUG nova.network.neutron [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2216.179189] env[62813]: DEBUG nova.network.neutron [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2216.459942] env[62813]: DEBUG nova.network.neutron [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Updating instance_info_cache with network_info: [{"id": "15a500ab-07b5-49d7-a387-10a7230dfe27", "address": "fa:16:3e:f1:b3:fe", "network": {"id": "0d8c8a43-7fa2-405e-9c3d-8c25c7410832", "bridge": "br-int", "label": "tempest-ServersTestJSON-1384810997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab4ddba893f4b47886bb54e9083c414", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15a500ab-07", "ovs_interfaceid": "15a500ab-07b5-49d7-a387-10a7230dfe27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2216.472579] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Releasing lock "refresh_cache-e7283591-30ac-4132-9b7f-d407a82e9b87" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2216.472886] env[62813]: DEBUG nova.compute.manager [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Instance network_info: |[{"id": "15a500ab-07b5-49d7-a387-10a7230dfe27", "address": "fa:16:3e:f1:b3:fe", "network": {"id": "0d8c8a43-7fa2-405e-9c3d-8c25c7410832", "bridge": "br-int", "label": "tempest-ServersTestJSON-1384810997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab4ddba893f4b47886bb54e9083c414", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15a500ab-07", "ovs_interfaceid": "15a500ab-07b5-49d7-a387-10a7230dfe27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2216.473394] env[62813]: 
DEBUG nova.virt.vmwareapi.vmops [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:b3:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad4fcde7-8926-402a-a9b7-4878d2bc1cf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15a500ab-07b5-49d7-a387-10a7230dfe27', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2216.481706] env[62813]: DEBUG oslo.service.loopingcall [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2216.482260] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2216.482967] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4fca7fd-9be7-4bf4-9abc-5c3a109af43d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.503151] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2216.503151] env[62813]: value = "task-4267797" [ 2216.503151] env[62813]: _type = "Task" [ 2216.503151] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2216.511475] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267797, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.001865] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d20c5e5d-fd91-4f33-9ce2-b4456a4842ac tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "366200bc-8852-45a3-be8b-016265dbfed1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2217.013688] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267797, 'name': CreateVM_Task, 'duration_secs': 0.311181} completed successfully. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2217.013869] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2217.014604] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2217.014770] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2217.015128] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2217.015397] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c889c67a-fc31-4e31-945e-e0afc750a02d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.020465] env[62813]: DEBUG oslo_vmware.api [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for the task: (returnval){ [ 2217.020465] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52133a22-218f-22f4-e1b1-758a635fd0e6" [ 2217.020465] env[62813]: _type = "Task" [ 2217.020465] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.029147] env[62813]: DEBUG oslo_vmware.api [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52133a22-218f-22f4-e1b1-758a635fd0e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.531429] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2217.531687] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2217.531901] env[62813]: DEBUG oslo_concurrency.lockutils [None req-57856f7e-038d-4299-b422-123d152273f8 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2218.047914] env[62813]: DEBUG nova.compute.manager [req-ba211bb3-a795-49d7-9c80-c53d7ecdfb03 req-990497d0-b8ec-447d-9a6e-d760ce90d00d service nova] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Received event network-changed-15a500ab-07b5-49d7-a387-10a7230dfe27 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2218.048169] env[62813]: DEBUG nova.compute.manager [req-ba211bb3-a795-49d7-9c80-c53d7ecdfb03 req-990497d0-b8ec-447d-9a6e-d760ce90d00d service nova] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Refreshing instance network info cache due to event network-changed-15a500ab-07b5-49d7-a387-10a7230dfe27. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2218.048327] env[62813]: DEBUG oslo_concurrency.lockutils [req-ba211bb3-a795-49d7-9c80-c53d7ecdfb03 req-990497d0-b8ec-447d-9a6e-d760ce90d00d service nova] Acquiring lock "refresh_cache-e7283591-30ac-4132-9b7f-d407a82e9b87" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2218.048479] env[62813]: DEBUG oslo_concurrency.lockutils [req-ba211bb3-a795-49d7-9c80-c53d7ecdfb03 req-990497d0-b8ec-447d-9a6e-d760ce90d00d service nova] Acquired lock "refresh_cache-e7283591-30ac-4132-9b7f-d407a82e9b87" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2218.048672] env[62813]: DEBUG nova.network.neutron [req-ba211bb3-a795-49d7-9c80-c53d7ecdfb03 req-990497d0-b8ec-447d-9a6e-d760ce90d00d service nova] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Refreshing network info cache for port 15a500ab-07b5-49d7-a387-10a7230dfe27 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2218.378785] env[62813]: DEBUG nova.network.neutron [req-ba211bb3-a795-49d7-9c80-c53d7ecdfb03 req-990497d0-b8ec-447d-9a6e-d760ce90d00d service nova] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Updated VIF entry in instance network info cache for port 15a500ab-07b5-49d7-a387-10a7230dfe27. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2218.379166] env[62813]: DEBUG nova.network.neutron [req-ba211bb3-a795-49d7-9c80-c53d7ecdfb03 req-990497d0-b8ec-447d-9a6e-d760ce90d00d service nova] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Updating instance_info_cache with network_info: [{"id": "15a500ab-07b5-49d7-a387-10a7230dfe27", "address": "fa:16:3e:f1:b3:fe", "network": {"id": "0d8c8a43-7fa2-405e-9c3d-8c25c7410832", "bridge": "br-int", "label": "tempest-ServersTestJSON-1384810997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dab4ddba893f4b47886bb54e9083c414", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15a500ab-07", "ovs_interfaceid": "15a500ab-07b5-49d7-a387-10a7230dfe27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.388883] env[62813]: DEBUG oslo_concurrency.lockutils [req-ba211bb3-a795-49d7-9c80-c53d7ecdfb03 req-990497d0-b8ec-447d-9a6e-d760ce90d00d service nova] Releasing lock "refresh_cache-e7283591-30ac-4132-9b7f-d407a82e9b87" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2229.164035] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2229.164361] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2229.164488] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2230.164736] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2237.164143] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2237.164568] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2237.164568] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2237.188452] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2237.188657] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2237.188766] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2237.188899] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2237.189037] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2237.189168] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2237.189291] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2237.189414] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2237.189535] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2237.189656] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2237.189775] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2237.190309] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2239.163906] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2239.164316] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2239.175739] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2239.176086] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2239.176233] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2239.176341] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 
2239.177442] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be868860-a61b-4b4e-a1ce-49e61a5baf94 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.186704] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00721f06-fb54-4f58-97fc-34a487d7b70f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.201980] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e18b872-baac-45ed-b5b6-dc4f260e3e91 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.208653] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7353f8b-bb29-44c8-b1d6-a02587c63d5a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.237457] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180762MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2239.237651] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2239.237805] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2239.306351] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2239.306520] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7f344eb3-b1a2-454f-a647-2d9ec7da915f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2239.306650] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance dcc68892-3e75-4da9-975a-5b41c69205f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2239.306781] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0dba8b6e-7927-432c-bd13-f5ce58f0c991 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2239.306906] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 769103f0-9ebd-4a7a-825f-bf7456cb6eb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2239.307049] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 926a846d-f902-4ec3-898e-439f10b4ee68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2239.307189] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 366200bc-8852-45a3-be8b-016265dbfed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2239.307309] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 10ce3bdd-0caa-47ff-bd11-90c038cc6be8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2239.307428] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 364c3489-27d4-47c9-a447-7ca4af197f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2239.307555] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e7283591-30ac-4132-9b7f-d407a82e9b87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2239.307747] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2239.307885] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=225GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2239.434760] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f8b1d1-da13-4f10-b6ea-fc4689b450f0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.442548] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ae3f86-aba7-4fce-b28a-5ac18f335712 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.473416] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd07f756-fe92-4729-87b8-88519f6048e9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.480510] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f97721-b5da-46f1-b8a8-3c7cc91e6731 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.493656] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2239.503623] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2239.518332] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2239.518535] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.281s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2242.519645] env[62813]: DEBUG oslo_service.periodic_task [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2243.159825] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2249.158720] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2252.956686] env[62813]: WARNING oslo_vmware.rw_handles [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2252.956686] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2252.956686] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2252.956686] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2252.956686] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2252.956686] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 2252.956686] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2252.956686] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2252.956686] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2252.956686] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2252.956686] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2252.956686] env[62813]: ERROR oslo_vmware.rw_handles [ 2252.957398] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/db2f1779-ed2b-4401-8fa1-255c86fa8c30/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2252.959239] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2252.959522] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/db2f1779-ed2b-4401-8fa1-255c86fa8c30/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/db2f1779-ed2b-4401-8fa1-255c86fa8c30/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2252.959817] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0eec96d4-dc6d-4afa-975f-06c9340498fa {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.968256] env[62813]: DEBUG oslo_vmware.api [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for the task: (returnval){ [ 2252.968256] env[62813]: value = "task-4267798" [ 2252.968256] env[62813]: _type = "Task" [ 2252.968256] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2252.977101] env[62813]: DEBUG oslo_vmware.api [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Task: {'id': task-4267798, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.479646] env[62813]: DEBUG oslo_vmware.exceptions [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Fault InvalidArgument not matched. {{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2253.479923] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2253.480526] env[62813]: ERROR nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2253.480526] env[62813]: Faults: ['InvalidArgument'] [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Traceback (most recent call last): [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] yield resources [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] self.driver.spawn(context, instance, image_meta, [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] self._fetch_image_if_missing(context, vi) [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] image_cache(vi, tmp_image_ds_loc) [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] vm_util.copy_virtual_disk( [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] session._wait_for_task(vmdk_copy_task) [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] return self.wait_for_task(task_ref) [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] return evt.wait() [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] result = hub.switch() [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] return self.greenlet.switch() [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] self.f(*self.args, **self.kw) [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] raise exceptions.translate_fault(task_info.error) [ 
2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Faults: ['InvalidArgument'] [ 2253.480526] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] [ 2253.481558] env[62813]: INFO nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Terminating instance [ 2253.482481] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2253.482692] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2253.482939] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dcf7957f-fba0-43c3-9203-488e0cb08b67 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.485241] env[62813]: DEBUG nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Start destroying the instance on the hypervisor. 
{{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2253.485439] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2253.486217] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8fda90-3ea3-4d25-b00d-72c412937dc8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.493366] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2253.493607] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f60baac-4a9b-4beb-9a67-923dbba7fba5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.495931] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2253.496121] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2253.497092] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72ce6d18-ffb4-45eb-808f-91e49eb00bdf {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.502492] env[62813]: DEBUG oslo_vmware.api [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Waiting for the task: (returnval){ [ 2253.502492] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52bd0fe8-4c0c-97ae-c418-fbf9a3a4c60a" [ 2253.502492] env[62813]: _type = "Task" [ 2253.502492] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.510734] env[62813]: DEBUG oslo_vmware.api [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52bd0fe8-4c0c-97ae-c418-fbf9a3a4c60a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.575887] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2253.576141] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2253.576325] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Deleting the datastore file [datastore2] 1feb4a0d-0b0f-434e-91e9-321a48fb166c {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2253.576598] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad46baaa-f0fd-4125-aa32-c97ca31de603 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.582811] env[62813]: DEBUG oslo_vmware.api [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for the task: (returnval){ [ 2253.582811] env[62813]: value = "task-4267800" [ 2253.582811] env[62813]: _type = "Task" [ 2253.582811] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.591031] env[62813]: DEBUG oslo_vmware.api [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Task: {'id': task-4267800, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2254.013246] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2254.013636] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Creating directory with path [datastore2] vmware_temp/57c1d37e-05ff-4554-ad36-e2f84f7f643c/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2254.013770] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8632d6ed-8577-40bf-8617-999128635e12 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.026622] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Created directory with path [datastore2] vmware_temp/57c1d37e-05ff-4554-ad36-e2f84f7f643c/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2254.026836] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Fetch image to [datastore2] vmware_temp/57c1d37e-05ff-4554-ad36-e2f84f7f643c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2254.027038] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/57c1d37e-05ff-4554-ad36-e2f84f7f643c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2254.027854] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5e7428-c9d1-4f26-9350-820311ab040f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.035235] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9637d5-5b9d-406d-b671-af76400aa35b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.046176] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc9e882-7286-4625-8465-b0e25c2e88fc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.078544] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-758da4fe-9aeb-4fa6-8eca-9578b4c0e5d6 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.088296] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-95c59328-b3c8-4c4c-9057-60eb8fce8576 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.095671] env[62813]: DEBUG oslo_vmware.api [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Task: {'id': task-4267800, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065222} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2254.095962] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2254.096207] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2254.096400] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2254.096586] env[62813]: INFO nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Took 0.61 seconds to destroy the instance on the hypervisor. 
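The entries above follow the standard oslo.vmware task pattern: a *_Task method (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, SearchDatastore_Task) is invoked through the API session, wait_for_task then polls it (the repeated "progress is 0%." entries) until vCenter reports completion, and a task error is raised as a translated exception, which is how the disk copy earlier failed with Faults: ['InvalidArgument'] ("A specified parameter was not correct: fileType"). A minimal sketch of that pattern, assuming an already-authenticated oslo_vmware.api.VMwareAPISession is passed in; the helper name and its arguments are illustrative, not the exact code in nova/virt/vmwareapi/vm_util.py:

# Sketch only: invoke a vSphere task through an existing oslo.vmware session
# and block on it the way the log entries above do.
from oslo_vmware import exceptions as vexc


def copy_virtual_disk(session, dc_ref, source_path, dest_path):
    """Start a CopyVirtualDisk_Task and wait until vCenter finishes it."""
    vim = session.vim
    task = session.invoke_api(
        vim, 'CopyVirtualDisk_Task',
        vim.service_content.virtualDiskManager,
        sourceName=source_path, sourceDatacenter=dc_ref,
        destName=dest_path, destDatacenter=dc_ref)
    try:
        # wait_for_task polls the server-side task state and raises a
        # translated exception if the task ends in error.
        return session.wait_for_task(task)
    except vexc.VimFaultException as err:
        # The failure earlier in this log surfaces here:
        # err.fault_list == ['InvalidArgument'] with the message
        # "A specified parameter was not correct: fileType".
        print('CopyVirtualDisk_Task failed: %s (faults: %s)'
              % (err, err.fault_list))
        raise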
[ 2254.099125] env[62813]: DEBUG nova.compute.claims [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2254.099324] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2254.099618] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2254.115679] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2254.179542] env[62813]: DEBUG oslo_vmware.rw_handles [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/57c1d37e-05ff-4554-ad36-e2f84f7f643c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2254.239772] env[62813]: DEBUG oslo_vmware.rw_handles [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2254.239772] env[62813]: DEBUG oslo_vmware.rw_handles [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/57c1d37e-05ff-4554-ad36-e2f84f7f643c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2254.335716] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648f4581-330e-46bd-86f9-daf6883ff7c3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.343872] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd499699-7234-472f-b653-36eeec7e92f0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.376245] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a38212f-58c9-46e7-af77-398c0c26c4ed {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.385774] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e914270-3fdd-4c6d-b9a0-2c20febd5d55 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.399919] env[62813]: DEBUG nova.compute.provider_tree [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2254.410237] env[62813]: DEBUG nova.scheduler.client.report [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2254.427381] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.328s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2254.427939] env[62813]: ERROR nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2254.427939] env[62813]: Faults: ['InvalidArgument'] [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Traceback (most recent call last): [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2254.427939] env[62813]: ERROR 
nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] self.driver.spawn(context, instance, image_meta, [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] self._fetch_image_if_missing(context, vi) [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] image_cache(vi, tmp_image_ds_loc) [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] vm_util.copy_virtual_disk( [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] session._wait_for_task(vmdk_copy_task) [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] return self.wait_for_task(task_ref) [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] return evt.wait() [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] result = hub.switch() [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] return self.greenlet.switch() [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] self.f(*self.args, **self.kw) [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] raise exceptions.translate_fault(task_info.error) [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Faults: ['InvalidArgument'] [ 2254.427939] env[62813]: ERROR nova.compute.manager [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] [ 2254.428784] env[62813]: DEBUG nova.compute.utils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2254.430414] env[62813]: DEBUG nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Build of instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c was re-scheduled: A specified parameter was not correct: fileType [ 2254.430414] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2254.430810] env[62813]: DEBUG nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2254.431045] env[62813]: DEBUG nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2254.431249] env[62813]: DEBUG nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2254.431419] env[62813]: DEBUG nova.network.neutron [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2254.719041] env[62813]: DEBUG nova.network.neutron [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2254.735248] env[62813]: INFO nova.compute.manager [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Took 0.30 seconds to deallocate network for instance. [ 2254.848563] env[62813]: INFO nova.scheduler.client.report [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Deleted allocations for instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c [ 2254.871209] env[62813]: DEBUG oslo_concurrency.lockutils [None req-937e0b6c-7a06-41a0-beaf-d407883f40a3 tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "1feb4a0d-0b0f-434e-91e9-321a48fb166c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 626.823s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2254.871501] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b72e7f96-fd2c-42d9-9f0f-741a32852f4e tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "1feb4a0d-0b0f-434e-91e9-321a48fb166c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 430.944s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2254.871747] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b72e7f96-fd2c-42d9-9f0f-741a32852f4e tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Acquiring lock "1feb4a0d-0b0f-434e-91e9-321a48fb166c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2254.871967] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b72e7f96-fd2c-42d9-9f0f-741a32852f4e tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "1feb4a0d-0b0f-434e-91e9-321a48fb166c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2254.872164] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b72e7f96-fd2c-42d9-9f0f-741a32852f4e tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "1feb4a0d-0b0f-434e-91e9-321a48fb166c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2254.874295] env[62813]: INFO nova.compute.manager [None req-b72e7f96-fd2c-42d9-9f0f-741a32852f4e tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Terminating instance [ 2254.876105] env[62813]: DEBUG nova.compute.manager [None req-b72e7f96-fd2c-42d9-9f0f-741a32852f4e tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2254.876572] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-b72e7f96-fd2c-42d9-9f0f-741a32852f4e tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2254.876799] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7fbfdf08-1c5a-4262-b472-2626e573b0d3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.886640] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4021bf4-84b5-4169-a387-611e93a047a1 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.916599] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-b72e7f96-fd2c-42d9-9f0f-741a32852f4e tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1feb4a0d-0b0f-434e-91e9-321a48fb166c could not be found. [ 2254.916817] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-b72e7f96-fd2c-42d9-9f0f-741a32852f4e tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2254.917012] env[62813]: INFO nova.compute.manager [None req-b72e7f96-fd2c-42d9-9f0f-741a32852f4e tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2254.917270] env[62813]: DEBUG oslo.service.loopingcall [None req-b72e7f96-fd2c-42d9-9f0f-741a32852f4e tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2254.917527] env[62813]: DEBUG nova.compute.manager [-] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2254.917649] env[62813]: DEBUG nova.network.neutron [-] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2254.943185] env[62813]: DEBUG nova.network.neutron [-] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2254.953014] env[62813]: INFO nova.compute.manager [-] [instance: 1feb4a0d-0b0f-434e-91e9-321a48fb166c] Took 0.04 seconds to deallocate network for instance. [ 2255.058844] env[62813]: DEBUG oslo_concurrency.lockutils [None req-b72e7f96-fd2c-42d9-9f0f-741a32852f4e tempest-DeleteServersTestJSON-1762719963 tempest-DeleteServersTestJSON-1762719963-project-member] Lock "1feb4a0d-0b0f-434e-91e9-321a48fb166c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.187s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2289.164086] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2290.163985] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2290.164247] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2292.166121] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2297.168075] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2297.168075] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2297.168499] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2297.191400] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2297.191634] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2297.191692] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2297.191814] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2297.191941] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2297.192092] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2297.192258] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2297.192399] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2297.192522] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2297.192646] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2299.163154] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2299.163483] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2300.164610] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2300.176953] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2300.177223] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2300.178029] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2300.178029] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2300.178750] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743f5a6a-23a8-4cc6-9b12-ae97e1379dff {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.187808] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624278c2-dacc-4e8f-879f-9fc59473304e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.202591] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddedc29a-890a-4cb5-a6a0-bbfed2949c3c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.209596] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359b4dc2-affc-488d-8c06-7714e0a88125 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.240343] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180764MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2300.240500] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2300.240699] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2300.326901] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 7f344eb3-b1a2-454f-a647-2d9ec7da915f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2300.327077] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance dcc68892-3e75-4da9-975a-5b41c69205f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2300.327213] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0dba8b6e-7927-432c-bd13-f5ce58f0c991 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2300.327343] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 769103f0-9ebd-4a7a-825f-bf7456cb6eb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2300.327464] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 926a846d-f902-4ec3-898e-439f10b4ee68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2300.327584] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 366200bc-8852-45a3-be8b-016265dbfed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2300.327703] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 10ce3bdd-0caa-47ff-bd11-90c038cc6be8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2300.327822] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 364c3489-27d4-47c9-a447-7ca4af197f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2300.327938] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e7283591-30ac-4132-9b7f-d407a82e9b87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2300.328148] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2300.328331] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=225GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2300.448099] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b35a9f-85fa-4a4d-877d-80d37a2b8c8f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.454597] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a5067b-2467-463c-a207-9ae49c0412f3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.486259] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18051f5e-26d1-4f9b-8f58-79487e51768a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.494466] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295d287d-accc-4516-af77-76c8208050e4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.507694] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2300.516069] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 
49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2300.530925] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2300.531135] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.290s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2302.570040] env[62813]: WARNING oslo_vmware.rw_handles [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2302.570040] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2302.570040] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2302.570040] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2302.570040] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2302.570040] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 2302.570040] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2302.570040] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2302.570040] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2302.570040] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2302.570040] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2302.570040] env[62813]: ERROR oslo_vmware.rw_handles [ 2302.570040] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/57c1d37e-05ff-4554-ad36-e2f84f7f643c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2302.571884] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Caching image {{(pid=62813) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2302.572148] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Copying Virtual Disk [datastore2] vmware_temp/57c1d37e-05ff-4554-ad36-e2f84f7f643c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/57c1d37e-05ff-4554-ad36-e2f84f7f643c/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2302.572480] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8e77ec2-033b-471e-9eb8-cfcf8c5c5d1a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.581391] env[62813]: DEBUG oslo_vmware.api [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Waiting for the task: (returnval){ [ 2302.581391] env[62813]: value = "task-4267801" [ 2302.581391] env[62813]: _type = "Task" [ 2302.581391] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2302.590887] env[62813]: DEBUG oslo_vmware.api [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Task: {'id': task-4267801, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.093031] env[62813]: DEBUG oslo_vmware.exceptions [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2303.093031] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2303.093559] env[62813]: ERROR nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2303.093559] env[62813]: Faults: ['InvalidArgument'] [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Traceback (most recent call last): [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] yield resources [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] self.driver.spawn(context, instance, image_meta, [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] self._fetch_image_if_missing(context, vi) [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] image_cache(vi, tmp_image_ds_loc) [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] vm_util.copy_virtual_disk( [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] session._wait_for_task(vmdk_copy_task) [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] return self.wait_for_task(task_ref) [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] return evt.wait() [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] result = hub.switch() [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] return self.greenlet.switch() [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] self.f(*self.args, **self.kw) [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] raise exceptions.translate_fault(task_info.error) [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Faults: ['InvalidArgument'] [ 2303.093559] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] [ 2303.094564] env[62813]: INFO nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Terminating instance [ 2303.096275] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2303.096275] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2303.096275] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33c0023a-b08b-4dc8-922d-42bed38efa33 {{(pid=62813) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.098297] env[62813]: DEBUG nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2303.098497] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2303.099254] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533a6be1-ae13-4a6a-a02f-245e54bb09d8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.106527] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2303.106771] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6491d7de-1bd5-42f6-9e5f-eb54adc7ea6b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.109038] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2303.109217] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2303.110173] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7027dd25-fdd5-41cc-bba4-885f43656b09 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.115094] env[62813]: DEBUG oslo_vmware.api [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Waiting for the task: (returnval){ [ 2303.115094] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]523d8081-03a2-acfe-1542-ebad93aedd3f" [ 2303.115094] env[62813]: _type = "Task" [ 2303.115094] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.123191] env[62813]: DEBUG oslo_vmware.api [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]523d8081-03a2-acfe-1542-ebad93aedd3f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.175748] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2303.175969] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2303.176159] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Deleting the datastore file [datastore2] 7f344eb3-b1a2-454f-a647-2d9ec7da915f {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2303.176451] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a157f8a2-6a35-4efa-9ecf-83402171f2d5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.184150] env[62813]: DEBUG oslo_vmware.api [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Waiting for the task: (returnval){ [ 2303.184150] env[62813]: value = "task-4267803" [ 2303.184150] env[62813]: _type = "Task" [ 2303.184150] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.192326] env[62813]: DEBUG oslo_vmware.api [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Task: {'id': task-4267803, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.530329] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2303.625574] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2303.625875] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Creating directory with path [datastore2] vmware_temp/1412c443-1f90-44bc-9ff5-8718fae67e5d/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2303.626086] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-062b1674-6642-4e84-8011-27848e305d62 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.637481] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Created directory with path [datastore2] vmware_temp/1412c443-1f90-44bc-9ff5-8718fae67e5d/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2303.637672] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Fetch image to [datastore2] vmware_temp/1412c443-1f90-44bc-9ff5-8718fae67e5d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2303.637844] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/1412c443-1f90-44bc-9ff5-8718fae67e5d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2303.638591] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660520cc-7dbb-4f03-92ef-b31a0da8410d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.645404] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a6811f-02ae-40e7-b14b-d917aca768e2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.654432] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62973828-c297-456e-a345-0a41f60d8af5 
{{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.685286] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ca08cb-1f78-4264-99fa-b4a0577ace76 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.696974] env[62813]: DEBUG oslo_vmware.api [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Task: {'id': task-4267803, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079738} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2303.697520] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2303.697721] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2303.697901] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2303.698091] env[62813]: INFO nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Took 0.60 seconds to destroy the instance on the hypervisor. 
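Editor's note: the entries above show the driver's task-wait pattern: it logs "Waiting for the task", polls the vCenter task until it reaches a terminal state (the repeated "progress is 0%" lines), then records the outcome (here DeleteDatastoreFile_Task completing in ~0.08s before the instance is reported destroyed). The following is a minimal, self-contained sketch of that poll-until-done pattern only; it is not oslo.vmware's or Nova's actual code, and the names FakeTask, TaskFailed and poll_task are hypothetical.

    # Sketch of the poll-until-done pattern reflected by the "Waiting for the task"
    # / "progress is N%" / "completed successfully" lines above. Hypothetical names;
    # NOT the oslo.vmware implementation.
    import time


    class TaskFailed(Exception):
        """Raised when the remote task reports an error state."""


    class FakeTask:
        """Stand-in for a vCenter task handle that eventually succeeds."""

        def __init__(self, ticks_to_finish=3):
            self._ticks = ticks_to_finish

        def info(self):
            # A real handle would query vCenter; here we simply count down.
            self._ticks -= 1
            if self._ticks > 0:
                return {"state": "running", "progress": 100 // (self._ticks + 1)}
            return {"state": "success", "progress": 100}


    def poll_task(task, interval=0.5, log=print):
        """Poll a task handle until it reaches a terminal state."""
        while True:
            info = task.info()
            if info["state"] == "success":
                log("Task completed successfully.")
                return info
            if info["state"] == "error":
                raise TaskFailed(info.get("error", "unknown error"))
            log(f"Task progress is {info['progress']}%.")
            time.sleep(interval)


    if __name__ == "__main__":
        poll_task(FakeTask(), interval=0.1)

Usage note: in the log, the same wait pattern surrounds CopyVirtualDisk_Task and SearchDatastore_Task invocations; the only difference is whether the terminal state yields a result (as here) or a fault that is translated and re-raised, as in the VimFaultException tracebacks elsewhere in this section.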
[ 2303.699766] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-906f2af7-5ff1-41a7-8868-223a5acba0b2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.701709] env[62813]: DEBUG nova.compute.claims [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2303.701891] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2303.702119] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2303.727013] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2303.791072] env[62813]: DEBUG oslo_vmware.rw_handles [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1412c443-1f90-44bc-9ff5-8718fae67e5d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2303.850545] env[62813]: DEBUG oslo_vmware.rw_handles [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2303.850610] env[62813]: DEBUG oslo_vmware.rw_handles [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1412c443-1f90-44bc-9ff5-8718fae67e5d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2303.929589] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a171d52-5865-4d56-b4ff-f52138d8b0bf {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.937650] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b425695-cce0-405b-8969-d3fddb447c91 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.967135] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51106d8-5b12-4491-9757-938298b99884 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.974611] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a940313f-6fad-4582-b85d-c528aecab743 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.988083] env[62813]: DEBUG nova.compute.provider_tree [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2303.996661] env[62813]: DEBUG nova.scheduler.client.report [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2304.010410] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.308s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2304.010913] env[62813]: ERROR nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2304.010913] env[62813]: Faults: ['InvalidArgument'] [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Traceback (most recent call last): [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2304.010913] env[62813]: 
ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] self.driver.spawn(context, instance, image_meta, [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] self._fetch_image_if_missing(context, vi) [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] image_cache(vi, tmp_image_ds_loc) [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] vm_util.copy_virtual_disk( [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] session._wait_for_task(vmdk_copy_task) [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] return self.wait_for_task(task_ref) [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] return evt.wait() [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] result = hub.switch() [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] return self.greenlet.switch() [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] self.f(*self.args, **self.kw) [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] raise exceptions.translate_fault(task_info.error) [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Faults: ['InvalidArgument'] [ 2304.010913] env[62813]: ERROR nova.compute.manager [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] [ 2304.011776] env[62813]: DEBUG nova.compute.utils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2304.013059] env[62813]: DEBUG nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Build of instance 7f344eb3-b1a2-454f-a647-2d9ec7da915f was re-scheduled: A specified parameter was not correct: fileType [ 2304.013059] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2304.013453] env[62813]: DEBUG nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2304.013626] env[62813]: DEBUG nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2304.013796] env[62813]: DEBUG nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2304.013961] env[62813]: DEBUG nova.network.neutron [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2304.347766] env[62813]: DEBUG nova.network.neutron [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2304.361482] env[62813]: INFO nova.compute.manager [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Took 0.34 seconds to deallocate network for instance. [ 2304.460295] env[62813]: INFO nova.scheduler.client.report [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Deleted allocations for instance 7f344eb3-b1a2-454f-a647-2d9ec7da915f [ 2304.481766] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6cec1000-e772-4190-ae51-da9279f375a1 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Lock "7f344eb3-b1a2-454f-a647-2d9ec7da915f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 568.027s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2304.481927] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8f175380-b511-485d-b6f1-69d425a60ed4 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Lock "7f344eb3-b1a2-454f-a647-2d9ec7da915f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 372.061s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2304.482091] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8f175380-b511-485d-b6f1-69d425a60ed4 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Acquiring lock "7f344eb3-b1a2-454f-a647-2d9ec7da915f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2304.482769] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8f175380-b511-485d-b6f1-69d425a60ed4 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Lock "7f344eb3-b1a2-454f-a647-2d9ec7da915f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2304.482769] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8f175380-b511-485d-b6f1-69d425a60ed4 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Lock "7f344eb3-b1a2-454f-a647-2d9ec7da915f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2304.484618] env[62813]: INFO nova.compute.manager [None req-8f175380-b511-485d-b6f1-69d425a60ed4 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Terminating instance [ 2304.486576] env[62813]: DEBUG nova.compute.manager [None req-8f175380-b511-485d-b6f1-69d425a60ed4 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2304.486803] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8f175380-b511-485d-b6f1-69d425a60ed4 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2304.487246] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6020aee-5352-4c1a-bd64-5fb7058fa600 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.498099] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d09bfc46-ee9e-4fbc-af3b-e381dfed77a5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.526968] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-8f175380-b511-485d-b6f1-69d425a60ed4 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7f344eb3-b1a2-454f-a647-2d9ec7da915f could not be found. [ 2304.527203] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-8f175380-b511-485d-b6f1-69d425a60ed4 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2304.527607] env[62813]: INFO nova.compute.manager [None req-8f175380-b511-485d-b6f1-69d425a60ed4 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2304.527872] env[62813]: DEBUG oslo.service.loopingcall [None req-8f175380-b511-485d-b6f1-69d425a60ed4 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2304.528323] env[62813]: DEBUG nova.compute.manager [-] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2304.528439] env[62813]: DEBUG nova.network.neutron [-] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2304.553819] env[62813]: DEBUG nova.network.neutron [-] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2304.562225] env[62813]: INFO nova.compute.manager [-] [instance: 7f344eb3-b1a2-454f-a647-2d9ec7da915f] Took 0.03 seconds to deallocate network for instance. [ 2304.656064] env[62813]: DEBUG oslo_concurrency.lockutils [None req-8f175380-b511-485d-b6f1-69d425a60ed4 tempest-ServerActionsTestOtherA-930812209 tempest-ServerActionsTestOtherA-930812209-project-member] Lock "7f344eb3-b1a2-454f-a647-2d9ec7da915f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2305.159079] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2326.801676] env[62813]: DEBUG oslo_concurrency.lockutils [None req-d40bbce0-7085-462b-87fd-c14be856e145 tempest-ServersAaction247Test-1470071726 tempest-ServersAaction247Test-1470071726-project-member] Acquiring lock "10ce3bdd-0caa-47ff-bd11-90c038cc6be8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.164609] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2347.165112] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Cleaning up deleted instances {{(pid=62813) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 2347.176276] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] There are 0 instances to clean {{(pid=62813) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 2347.577288] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2347.577702] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Getting list of instances from cluster (obj){ [ 2347.577702] env[62813]: value = "domain-c8" [ 2347.577702] env[62813]: _type = "ClusterComputeResource" [ 2347.577702] env[62813]: } {{(pid=62813) list_instances 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2347.578791] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8436ad3-34a9-4d61-9b01-623825965d30 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.594497] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Got total of 8 instances {{(pid=62813) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2349.198818] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2351.165267] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2351.165610] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2352.588103] env[62813]: WARNING oslo_vmware.rw_handles [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2352.588103] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2352.588103] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2352.588103] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2352.588103] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2352.588103] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 2352.588103] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2352.588103] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2352.588103] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2352.588103] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2352.588103] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2352.588103] env[62813]: ERROR oslo_vmware.rw_handles [ 2352.588933] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/1412c443-1f90-44bc-9ff5-8718fae67e5d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2352.590666] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b 
tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2352.590951] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Copying Virtual Disk [datastore2] vmware_temp/1412c443-1f90-44bc-9ff5-8718fae67e5d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/1412c443-1f90-44bc-9ff5-8718fae67e5d/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2352.591288] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-629983ce-7fd5-4e61-ac17-fd477ba5281b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.599686] env[62813]: DEBUG oslo_vmware.api [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Waiting for the task: (returnval){ [ 2352.599686] env[62813]: value = "task-4267804" [ 2352.599686] env[62813]: _type = "Task" [ 2352.599686] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2352.609196] env[62813]: DEBUG oslo_vmware.api [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Task: {'id': task-4267804, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.110687] env[62813]: DEBUG oslo_vmware.exceptions [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2353.110687] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2353.111444] env[62813]: ERROR nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2353.111444] env[62813]: Faults: ['InvalidArgument'] [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Traceback (most recent call last): [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] yield resources [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] self.driver.spawn(context, instance, image_meta, [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] self._fetch_image_if_missing(context, vi) [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] image_cache(vi, tmp_image_ds_loc) [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] vm_util.copy_virtual_disk( [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] session._wait_for_task(vmdk_copy_task) [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] return self.wait_for_task(task_ref) [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] return evt.wait() [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] result = hub.switch() [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] return self.greenlet.switch() [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] self.f(*self.args, **self.kw) [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] raise exceptions.translate_fault(task_info.error) [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Faults: ['InvalidArgument'] [ 2353.111444] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] [ 2353.112490] env[62813]: INFO nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Terminating instance [ 2353.113714] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2353.113904] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2353.114170] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d35cb100-c775-497f-8285-779d14d9cd8b {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.116562] env[62813]: DEBUG nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2353.116771] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2353.117611] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3be652-eb7a-4cb5-8fed-8e1deb96fe97 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.125526] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2353.125766] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58a6d5a7-1551-4c0b-84af-d6d88324182a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.128205] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2353.128402] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2353.129468] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d990d84d-6db0-4656-b9f3-acebc9645c0d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.134657] env[62813]: DEBUG oslo_vmware.api [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for the task: (returnval){ [ 2353.134657] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]522a1c4a-4aac-fe3f-9fa2-a41fdd26aa99" [ 2353.134657] env[62813]: _type = "Task" [ 2353.134657] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2353.143342] env[62813]: DEBUG oslo_vmware.api [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]522a1c4a-4aac-fe3f-9fa2-a41fdd26aa99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.164098] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2353.206281] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2353.206555] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2353.206759] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Deleting the datastore file [datastore2] dcc68892-3e75-4da9-975a-5b41c69205f7 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2353.207048] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f2391ac0-bdaa-41fc-b90e-85f503929800 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.213961] env[62813]: DEBUG oslo_vmware.api [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Waiting for the task: (returnval){ [ 2353.213961] env[62813]: value = "task-4267806" [ 2353.213961] env[62813]: _type = "Task" [ 2353.213961] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2353.222949] env[62813]: DEBUG oslo_vmware.api [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Task: {'id': task-4267806, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.645137] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2353.646177] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Creating directory with path [datastore2] vmware_temp/77db2000-06c3-40fb-9933-070fbb914fe2/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2353.646177] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58372ec1-b1f3-4840-bed8-bed34e3558fd {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.659363] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Created directory with path [datastore2] vmware_temp/77db2000-06c3-40fb-9933-070fbb914fe2/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2353.659594] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Fetch image to [datastore2] vmware_temp/77db2000-06c3-40fb-9933-070fbb914fe2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2353.659719] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/77db2000-06c3-40fb-9933-070fbb914fe2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2353.660525] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0de0c11-9221-4053-9f4e-a1e742e98eb4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.668480] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3634b0c9-51b6-4559-8337-233a9e7a01db {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.678574] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104da8fe-59a9-4ff3-9d44-932fde060bf8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.710548] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd87bd3-2648-40b7-a37e-e1ffa7548026 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.719989] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-808a433c-abe1-4732-9ac3-5a9800535473 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.724723] env[62813]: DEBUG oslo_vmware.api [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Task: {'id': task-4267806, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074265} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2353.725405] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2353.725645] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2353.725833] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2353.726019] env[62813]: INFO nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Took 0.61 seconds to destroy the instance on the hypervisor. 
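The failed spawn recorded above follows the pattern that recurs throughout this log: a vSphere task (CopyVirtualDisk_Task, DeleteDatastoreFile_Task) is created, polled until it reaches a terminal state, and a task-level error is re-raised as an exception (here oslo_vmware.exceptions.VimFaultException with Faults: ['InvalidArgument'] on fileType). The snippet below is a minimal, self-contained Python sketch of that poll-and-translate loop, included only as illustration; TaskFaultError, get_task_info and poll_interval are illustrative stand-ins and not the actual oslo.vmware API.

    import time

    class TaskFaultError(Exception):
        # Stand-in for a fault-carrying exception such as VimFaultException:
        # keeps the fault names reported by the task, e.g. ['InvalidArgument'].
        def __init__(self, message, fault_list):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll a task description of the form
        # {'state': ..., 'progress': ..., 'error': ...} until it finishes,
        # mirroring the repeated "Task: {...} progress is 0%" lines above.
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # The point where a task error becomes a Python exception,
                # analogous to the translate_fault() call in the traceback.
                raise TaskFaultError(info['error']['message'],
                                     info['error']['faults'])
            time.sleep(poll_interval)

    # Example: simulate the InvalidArgument failure seen in this log.
    states = iter([
        {'state': 'running', 'progress': 0},
        {'state': 'error',
         'error': {'message': 'A specified parameter was not correct: fileType',
                   'faults': ['InvalidArgument']}},
    ])
    try:
        wait_for_task(lambda: next(states), poll_interval=0)
    except TaskFaultError as exc:
        print(exc, exc.fault_list)

Under these assumptions, a caller hitting this failure would catch the exception, abort its resource claim and let the build be re-scheduled, which is what the compute manager does in the entries that follow.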
[ 2353.728296] env[62813]: DEBUG nova.compute.claims [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2353.728471] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2353.728706] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2353.750862] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2353.812714] env[62813]: DEBUG oslo_vmware.rw_handles [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/77db2000-06c3-40fb-9933-070fbb914fe2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2353.873845] env[62813]: DEBUG oslo_vmware.rw_handles [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2353.874045] env[62813]: DEBUG oslo_vmware.rw_handles [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/77db2000-06c3-40fb-9933-070fbb914fe2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2353.947871] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8821e504-4c01-4b47-b97d-ef480fb7ef2b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.957072] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3851e6-2f76-46b9-a6a3-7c43a2db73b8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.986488] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e66ce68-158d-4de1-b147-783bdb88ff77 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.994776] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b95fe06-5eb4-412e-8916-c1fc144ccf32 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.009798] env[62813]: DEBUG nova.compute.provider_tree [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2354.018388] env[62813]: DEBUG nova.scheduler.client.report [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2354.032620] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.304s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2354.033195] env[62813]: ERROR nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2354.033195] env[62813]: Faults: ['InvalidArgument'] [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Traceback (most recent call last): [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2354.033195] env[62813]: ERROR nova.compute.manager 
[instance: dcc68892-3e75-4da9-975a-5b41c69205f7] self.driver.spawn(context, instance, image_meta, [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] self._fetch_image_if_missing(context, vi) [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] image_cache(vi, tmp_image_ds_loc) [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] vm_util.copy_virtual_disk( [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] session._wait_for_task(vmdk_copy_task) [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] return self.wait_for_task(task_ref) [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] return evt.wait() [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] result = hub.switch() [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] return self.greenlet.switch() [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] self.f(*self.args, **self.kw) [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] raise exceptions.translate_fault(task_info.error) [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Faults: ['InvalidArgument'] [ 2354.033195] env[62813]: ERROR nova.compute.manager [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] [ 2354.034081] env[62813]: DEBUG nova.compute.utils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2354.035856] env[62813]: DEBUG nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Build of instance dcc68892-3e75-4da9-975a-5b41c69205f7 was re-scheduled: A specified parameter was not correct: fileType [ 2354.035856] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2354.036279] env[62813]: DEBUG nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2354.036452] env[62813]: DEBUG nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2354.036626] env[62813]: DEBUG nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2354.036791] env[62813]: DEBUG nova.network.neutron [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2354.442809] env[62813]: DEBUG nova.network.neutron [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2354.454342] env[62813]: INFO nova.compute.manager [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Took 0.42 seconds to deallocate network for instance. [ 2354.563104] env[62813]: INFO nova.scheduler.client.report [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Deleted allocations for instance dcc68892-3e75-4da9-975a-5b41c69205f7 [ 2354.581795] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0a290cb-3ec3-4e6b-8a55-bbb9d77ccd2b tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Lock "dcc68892-3e75-4da9-975a-5b41c69205f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 567.583s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2354.582073] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0d21daa-5196-4a2b-97c7-366ed1a3ac6f tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Lock "dcc68892-3e75-4da9-975a-5b41c69205f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 371.569s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2354.582308] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0d21daa-5196-4a2b-97c7-366ed1a3ac6f tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "dcc68892-3e75-4da9-975a-5b41c69205f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2354.582522] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0d21daa-5196-4a2b-97c7-366ed1a3ac6f tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Lock "dcc68892-3e75-4da9-975a-5b41c69205f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2354.582693] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0d21daa-5196-4a2b-97c7-366ed1a3ac6f tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Lock "dcc68892-3e75-4da9-975a-5b41c69205f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2354.585800] env[62813]: INFO nova.compute.manager [None req-c0d21daa-5196-4a2b-97c7-366ed1a3ac6f tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Terminating instance [ 2354.587916] env[62813]: DEBUG nova.compute.manager [None req-c0d21daa-5196-4a2b-97c7-366ed1a3ac6f tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2354.588157] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d21daa-5196-4a2b-97c7-366ed1a3ac6f tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2354.588702] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ebe2cbfe-e22c-4873-89c7-ad3e60ed607f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.598035] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcf6739-20dd-44b3-9f4e-1c223c25d85d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.630165] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-c0d21daa-5196-4a2b-97c7-366ed1a3ac6f tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dcc68892-3e75-4da9-975a-5b41c69205f7 could not be found. [ 2354.630327] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d21daa-5196-4a2b-97c7-366ed1a3ac6f tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2354.630453] env[62813]: INFO nova.compute.manager [None req-c0d21daa-5196-4a2b-97c7-366ed1a3ac6f tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2354.630785] env[62813]: DEBUG oslo.service.loopingcall [None req-c0d21daa-5196-4a2b-97c7-366ed1a3ac6f tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2354.631088] env[62813]: DEBUG nova.compute.manager [-] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2354.631813] env[62813]: DEBUG nova.network.neutron [-] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2354.680212] env[62813]: DEBUG nova.network.neutron [-] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2354.689886] env[62813]: INFO nova.compute.manager [-] [instance: dcc68892-3e75-4da9-975a-5b41c69205f7] Took 0.06 seconds to deallocate network for instance. [ 2354.790893] env[62813]: DEBUG oslo_concurrency.lockutils [None req-c0d21daa-5196-4a2b-97c7-366ed1a3ac6f tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Lock "dcc68892-3e75-4da9-975a-5b41c69205f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.208s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2359.164576] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.165057] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2359.165143] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2359.183142] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2359.183387] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2359.183595] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2359.183809] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2359.184043] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2359.184277] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2359.184491] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2359.184691] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2359.185402] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.541354] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.560132] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Getting list of instances from cluster (obj){ [ 2359.560132] env[62813]: value = "domain-c8" [ 2359.560132] env[62813]: _type = "ClusterComputeResource" [ 2359.560132] env[62813]: } {{(pid=62813) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2359.561929] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dda21ba-531c-4993-8740-c42c4b6f85a8 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.584088] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Got total of 7 instances {{(pid=62813) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2359.584429] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 0dba8b6e-7927-432c-bd13-f5ce58f0c991 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2359.584790] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 769103f0-9ebd-4a7a-825f-bf7456cb6eb9 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2359.585134] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 926a846d-f902-4ec3-898e-439f10b4ee68 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2359.585443] env[62813]: DEBUG 
nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 366200bc-8852-45a3-be8b-016265dbfed1 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2359.585739] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 10ce3bdd-0caa-47ff-bd11-90c038cc6be8 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2359.586044] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid 364c3489-27d4-47c9-a447-7ca4af197f67 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2359.586345] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Triggering sync for uuid e7283591-30ac-4132-9b7f-d407a82e9b87 {{(pid=62813) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2359.586762] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2359.587202] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "769103f0-9ebd-4a7a-825f-bf7456cb6eb9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2359.587584] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "926a846d-f902-4ec3-898e-439f10b4ee68" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2359.587927] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "366200bc-8852-45a3-be8b-016265dbfed1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2359.588299] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "10ce3bdd-0caa-47ff-bd11-90c038cc6be8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2359.588642] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "364c3489-27d4-47c9-a447-7ca4af197f67" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2359.589007] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "e7283591-30ac-4132-9b7f-d407a82e9b87" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62813) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2360.212448] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2361.164104] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2362.174557] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2362.186674] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2362.186924] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2362.187109] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2362.187273] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2362.188398] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83a648c-65da-4cb5-921e-cda9a37f135f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.197856] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb5017c-b5f3-4ab1-ae3a-55c56b75ae15 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.212507] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b1f16a-5f86-4cb9-a3d1-19f23bbb9a38 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.219972] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11bdccf-265c-4770-b58d-2aba944b24d3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.251103] env[62813]: DEBUG nova.compute.resource_tracker [None 
req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180768MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2362.251333] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2362.251468] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2362.341729] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 0dba8b6e-7927-432c-bd13-f5ce58f0c991 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2362.341913] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 769103f0-9ebd-4a7a-825f-bf7456cb6eb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2362.342060] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 926a846d-f902-4ec3-898e-439f10b4ee68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2362.342190] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 366200bc-8852-45a3-be8b-016265dbfed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2362.342310] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 10ce3bdd-0caa-47ff-bd11-90c038cc6be8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2362.342429] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 364c3489-27d4-47c9-a447-7ca4af197f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
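The "Acquiring lock ... / Lock ... acquired ... waited / Lock ... released ... held" triples that dominate this trace are emitted by the `inner` wrapper in oslo.concurrency's lockutils (the lockutils.py:402/407/421 call sites shown above). A minimal sketch of the same pattern, using the real `oslo_concurrency.lockutils` API but with a placeholder critical section rather than Nova's code:

```python
from oslo_concurrency import lockutils

# Decorator form: the wrapped body runs while holding the named lock, and
# lockutils logs the "acquired ... waited Ns" / "released ... held Ns" lines.
@lockutils.synchronized("compute_resources")
def update_available_resource():
    pass  # placeholder critical section, not Nova's resource tracker code

# Equivalent explicit form, as a context manager:
with lockutils.lock("compute_resources"):
    pass  # placeholder critical section
```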
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2362.342547] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e7283591-30ac-4132-9b7f-d407a82e9b87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2362.342746] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2362.342963] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=225GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2362.359841] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing inventories for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2362.374821] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Updating ProviderTree inventory for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2362.375068] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Updating inventory in ProviderTree for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2362.388826] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing aggregate associations for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2, aggregates: None {{(pid=62813) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2362.410164] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Refreshing trait associations for resource provider 49efdf20-78bc-435f-a902-9cc99ed395f2, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=62813) _refresh_associations 
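The inventory payload logged above is all placement needs to compute schedulable capacity: per resource class it offers (total - reserved) * allocation_ratio. A short sketch with the exact numbers from the log; the helper function is illustrative, not a placement API:

```python
# Inventory copied from the log above (keys trimmed to what the math needs).
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 405,    "reserved": 0,   "allocation_ratio": 1.0},
}

def effective_capacity(inv):
    # (total - reserved) * allocation_ratio is what placement will hand out.
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 405.0}
```

The "Final resource view" above is consistent with the per-instance allocations listed before it: used_ram=1408MB is the 512MB reserved plus 7 x 128MB, and used_disk=7GB / used_vcpus=7 are 7 x 1GB and 7 x 1 VCPU.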
/opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2362.509675] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4d5702-c570-44c0-8694-42118b407472 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.518233] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c67c58-1b93-4ec0-b2ed-359124bbf1c9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.548455] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad20cd16-4e78-4cf2-a4da-47926e53703c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.557037] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a202d40-0761-452b-8b8a-3c39c3355712 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.570945] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2362.581313] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2362.605641] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2362.605820] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.354s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.595215] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2367.159575] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2367.164316] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations 
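The "Running periodic task ComputeManager._..." lines come from oslo.service's periodic-task machinery. A minimal, self-contained sketch of how such tasks are declared and driven; the manager class and task body here are illustrative, not Nova's ComputeManager:

```python
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class DemoManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _poll_something(self, context):
        # Every decorated method is collected automatically and logged as
        # "Running periodic task DemoManager._poll_something" when it fires.
        pass

# The service's timer loop invokes the collected tasks via
#   manager.run_periodic_tasks(context)
# which is the run_periodic_tasks call site visible in the log lines above.
```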
{{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2367.164476] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Cleaning up deleted instances with incomplete migration {{(pid=62813) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 2369.168900] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2375.603301] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquiring lock "a7835289-7896-491a-9c8a-df83f79fa457" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2375.603716] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Lock "a7835289-7896-491a-9c8a-df83f79fa457" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2375.615125] env[62813]: DEBUG nova.compute.manager [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2375.671118] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2375.671448] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2375.673044] env[62813]: INFO nova.compute.claims [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2375.879713] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3398003-ac4d-4b3f-af2c-733c3b0fc6f0 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.889585] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4982f270-0301-4677-884f-a0a0e52a05a3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.922073] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c3a560-4308-45f0-baa5-1588fac18a84 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.931140] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c284496-6663-477e-8c9b-ab660d046538 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.947724] env[62813]: DEBUG nova.compute.provider_tree [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2375.957434] env[62813]: DEBUG nova.scheduler.client.report [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2375.978221] env[62813]: DEBUG 
oslo_concurrency.lockutils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.307s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2375.978712] env[62813]: DEBUG nova.compute.manager [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2376.016183] env[62813]: DEBUG nova.compute.utils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2376.017493] env[62813]: DEBUG nova.compute.manager [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2376.017672] env[62813]: DEBUG nova.network.neutron [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2376.029658] env[62813]: DEBUG nova.compute.manager [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2376.082518] env[62813]: DEBUG nova.policy [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8176143266e84a458bfc13cc66983203', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87677daaac17461a97538cd8740330f2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 2376.104267] env[62813]: DEBUG nova.compute.manager [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2376.132632] env[62813]: DEBUG nova.virt.hardware [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2376.132887] env[62813]: DEBUG nova.virt.hardware [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2376.133092] env[62813]: DEBUG nova.virt.hardware [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2376.133334] env[62813]: DEBUG nova.virt.hardware [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2376.133494] env[62813]: DEBUG nova.virt.hardware [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2376.133646] env[62813]: DEBUG nova.virt.hardware [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2376.133859] env[62813]: DEBUG nova.virt.hardware [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2376.134030] env[62813]: DEBUG nova.virt.hardware [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2376.134208] 
env[62813]: DEBUG nova.virt.hardware [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2376.134379] env[62813]: DEBUG nova.virt.hardware [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2376.134548] env[62813]: DEBUG nova.virt.hardware [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2376.135473] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b130eaeb-80fa-481e-9fe9-2a2940d01b7f {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.143809] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc84b488-859b-4c82-9122-d0593da2075d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.470226] env[62813]: DEBUG nova.network.neutron [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Successfully created port: 06a29b03-f332-4627-8520-95ac7ba03caf {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2377.093169] env[62813]: DEBUG nova.compute.manager [req-fd54eaf9-4fd6-43ef-84da-59dcb192c351 req-4461453d-b945-4f0d-9d83-09da3ce333d9 service nova] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Received event network-vif-plugged-06a29b03-f332-4627-8520-95ac7ba03caf {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2377.093429] env[62813]: DEBUG oslo_concurrency.lockutils [req-fd54eaf9-4fd6-43ef-84da-59dcb192c351 req-4461453d-b945-4f0d-9d83-09da3ce333d9 service nova] Acquiring lock "a7835289-7896-491a-9c8a-df83f79fa457-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2377.093630] env[62813]: DEBUG oslo_concurrency.lockutils [req-fd54eaf9-4fd6-43ef-84da-59dcb192c351 req-4461453d-b945-4f0d-9d83-09da3ce333d9 service nova] Lock "a7835289-7896-491a-9c8a-df83f79fa457-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2377.093805] env[62813]: DEBUG oslo_concurrency.lockutils [req-fd54eaf9-4fd6-43ef-84da-59dcb192c351 req-4461453d-b945-4f0d-9d83-09da3ce333d9 service nova] Lock "a7835289-7896-491a-9c8a-df83f79fa457-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2377.093974] env[62813]: DEBUG nova.compute.manager [req-fd54eaf9-4fd6-43ef-84da-59dcb192c351 req-4461453d-b945-4f0d-9d83-09da3ce333d9 service nova] [instance: a7835289-7896-491a-9c8a-df83f79fa457] No waiting events found dispatching network-vif-plugged-06a29b03-f332-4627-8520-95ac7ba03caf {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2377.094196] env[62813]: WARNING nova.compute.manager [req-fd54eaf9-4fd6-43ef-84da-59dcb192c351 req-4461453d-b945-4f0d-9d83-09da3ce333d9 service nova] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Received unexpected event network-vif-plugged-06a29b03-f332-4627-8520-95ac7ba03caf for instance with vm_state building and task_state spawning. [ 2377.192453] env[62813]: DEBUG nova.network.neutron [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Successfully updated port: 06a29b03-f332-4627-8520-95ac7ba03caf {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2377.212633] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquiring lock "refresh_cache-a7835289-7896-491a-9c8a-df83f79fa457" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2377.212948] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquired lock "refresh_cache-a7835289-7896-491a-9c8a-df83f79fa457" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2377.213182] env[62813]: DEBUG nova.network.neutron [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2377.249721] env[62813]: DEBUG oslo_concurrency.lockutils [None req-5a31008d-df07-45e3-b6d3-6c86d388f848 tempest-AttachVolumeTestJSON-110042237 tempest-AttachVolumeTestJSON-110042237-project-member] Acquiring lock "364c3489-27d4-47c9-a447-7ca4af197f67" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2377.276064] env[62813]: DEBUG nova.network.neutron [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2377.465886] env[62813]: DEBUG nova.network.neutron [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Updating instance_info_cache with network_info: [{"id": "06a29b03-f332-4627-8520-95ac7ba03caf", "address": "fa:16:3e:03:0a:3f", "network": {"id": "3e659b4f-671d-4144-82d6-4e80fcf365c9", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1216202715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87677daaac17461a97538cd8740330f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06a29b03-f3", "ovs_interfaceid": "06a29b03-f332-4627-8520-95ac7ba03caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2377.480825] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Releasing lock "refresh_cache-a7835289-7896-491a-9c8a-df83f79fa457" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2377.481245] env[62813]: DEBUG nova.compute.manager [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Instance network_info: |[{"id": "06a29b03-f332-4627-8520-95ac7ba03caf", "address": "fa:16:3e:03:0a:3f", "network": {"id": "3e659b4f-671d-4144-82d6-4e80fcf365c9", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1216202715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87677daaac17461a97538cd8740330f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06a29b03-f3", "ovs_interfaceid": "06a29b03-f332-4627-8520-95ac7ba03caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async 
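The network_info blob recorded above is plain JSON-compatible data, so the fields most useful when debugging a port (port id, MAC, fixed IP, MTU) can be pulled out directly. The dict below is abbreviated from the log entry above; the helper is illustrative:

```python
# Abbreviated copy of the VIF entry from the instance_info_cache update above.
vif = {
    "id": "06a29b03-f332-4627-8520-95ac7ba03caf",
    "address": "fa:16:3e:03:0a:3f",
    "type": "ovs",
    "devname": "tap06a29b03-f3",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4}],
        }],
        "meta": {"mtu": 8950},
    },
}

def summarize_vif(v):
    # Collect every fixed IP across all subnets of the VIF's network.
    ips = [ip["address"] for s in v["network"]["subnets"] for ip in s["ips"]]
    return v["id"], v["address"], ips, v["network"]["meta"]["mtu"]

print(summarize_vif(vif))
# -> port id, 'fa:16:3e:03:0a:3f', ['192.168.128.8'], 8950
```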
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2377.482043] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:0a:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b5f9472-1844-4c99-8804-8f193cfff562', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06a29b03-f332-4627-8520-95ac7ba03caf', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2377.491511] env[62813]: DEBUG oslo.service.loopingcall [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2377.492216] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2377.492541] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ccf667f-6122-4650-ba60-1a5e13715750 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.520412] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2377.520412] env[62813]: value = "task-4267807" [ 2377.520412] env[62813]: _type = "Task" [ 2377.520412] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2377.529668] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267807, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2378.031114] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267807, 'name': CreateVM_Task, 'duration_secs': 0.315213} completed successfully. 
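CreateVM_Task above completes only after wait_for_task has polled it to a terminal state, which is where the "progress is 0%" lines come from. The generic polling loop below shows the pattern only; it is not oslo.vmware's implementation, and get_task_info is a stub standing in for a vCenter query:

```python
import time

def get_task_info(task):
    # Stub: a real implementation would ask vCenter for the task's current state.
    return {"state": "success", "progress": 100}

def wait_for_task(task, interval=0.5, timeout=300):
    """Poll a task such as task-4267807 until it succeeds, errors, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task)
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError("task failed: %r" % info)
        time.sleep(interval)  # between polls the "progress is N%" DEBUG lines appear
    raise TimeoutError("task %s did not complete in %ss" % (task, timeout))

wait_for_task("task-4267807")
```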
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2378.031329] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2378.031964] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2378.032148] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2378.032524] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2378.032781] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf3156a0-bcf8-43b9-b31d-5296e57d194e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.037589] env[62813]: DEBUG oslo_vmware.api [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Waiting for the task: (returnval){ [ 2378.037589] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]52000642-e8dd-5b6f-a98c-b249d235f2fb" [ 2378.037589] env[62813]: _type = "Task" [ 2378.037589] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2378.045788] env[62813]: DEBUG oslo_vmware.api [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]52000642-e8dd-5b6f-a98c-b249d235f2fb, 'name': SearchDatastore_Task} progress is 0%. 
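The datastore paths being locked and searched above follow a fixed layout: an image is fetched into a throwaway vmware_temp directory and then kept under devstack-image-cache_base/<image id>/<image id>.vmdk, which is what later spawns of the same image look up. A small path-building sketch using the image id from the log; the helper itself is illustrative:

```python
import posixpath
import uuid

IMAGE_ID = "f6ee7c32-a26c-4731-80b9-1e546ea30e47"  # image id seen in the log
CACHE_DIR = "devstack-image-cache_base"

def image_cache_paths(image_id, tmp_dir=None):
    tmp_dir = tmp_dir or str(uuid.uuid4())
    cached = posixpath.join(CACHE_DIR, image_id, image_id + ".vmdk")
    fetch = posixpath.join("vmware_temp", tmp_dir, image_id, "tmp-sparse.vmdk")
    return cached, fetch

cached, fetch = image_cache_paths(IMAGE_ID)
# cached -> 'devstack-image-cache_base/f6ee7c32-.../f6ee7c32-....vmdk'
# fetch  -> 'vmware_temp/<random>/f6ee7c32-.../tmp-sparse.vmdk'
# Compare the "[datastore2] ..." lock names and fetch targets in the log above.
```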
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2378.548328] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2378.548695] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2378.548877] env[62813]: DEBUG oslo_concurrency.lockutils [None req-e6a2e3ac-773e-4c84-a82f-acd150d00e41 tempest-AttachVolumeNegativeTest-1862482780 tempest-AttachVolumeNegativeTest-1862482780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2379.121194] env[62813]: DEBUG nova.compute.manager [req-225e6ce8-727e-42cf-a2ee-d82bc8228830 req-ade876a6-fbd7-4dc8-81f3-5d6d127e54b9 service nova] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Received event network-changed-06a29b03-f332-4627-8520-95ac7ba03caf {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2379.121194] env[62813]: DEBUG nova.compute.manager [req-225e6ce8-727e-42cf-a2ee-d82bc8228830 req-ade876a6-fbd7-4dc8-81f3-5d6d127e54b9 service nova] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Refreshing instance network info cache due to event network-changed-06a29b03-f332-4627-8520-95ac7ba03caf. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2379.121194] env[62813]: DEBUG oslo_concurrency.lockutils [req-225e6ce8-727e-42cf-a2ee-d82bc8228830 req-ade876a6-fbd7-4dc8-81f3-5d6d127e54b9 service nova] Acquiring lock "refresh_cache-a7835289-7896-491a-9c8a-df83f79fa457" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2379.121194] env[62813]: DEBUG oslo_concurrency.lockutils [req-225e6ce8-727e-42cf-a2ee-d82bc8228830 req-ade876a6-fbd7-4dc8-81f3-5d6d127e54b9 service nova] Acquired lock "refresh_cache-a7835289-7896-491a-9c8a-df83f79fa457" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2379.121194] env[62813]: DEBUG nova.network.neutron [req-225e6ce8-727e-42cf-a2ee-d82bc8228830 req-ade876a6-fbd7-4dc8-81f3-5d6d127e54b9 service nova] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Refreshing network info cache for port 06a29b03-f332-4627-8520-95ac7ba03caf {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2379.407308] env[62813]: DEBUG nova.network.neutron [req-225e6ce8-727e-42cf-a2ee-d82bc8228830 req-ade876a6-fbd7-4dc8-81f3-5d6d127e54b9 service nova] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Updated VIF entry in instance network info cache for port 06a29b03-f332-4627-8520-95ac7ba03caf. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2379.407308] env[62813]: DEBUG nova.network.neutron [req-225e6ce8-727e-42cf-a2ee-d82bc8228830 req-ade876a6-fbd7-4dc8-81f3-5d6d127e54b9 service nova] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Updating instance_info_cache with network_info: [{"id": "06a29b03-f332-4627-8520-95ac7ba03caf", "address": "fa:16:3e:03:0a:3f", "network": {"id": "3e659b4f-671d-4144-82d6-4e80fcf365c9", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1216202715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87677daaac17461a97538cd8740330f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06a29b03-f3", "ovs_interfaceid": "06a29b03-f332-4627-8520-95ac7ba03caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2379.416494] env[62813]: DEBUG oslo_concurrency.lockutils [req-225e6ce8-727e-42cf-a2ee-d82bc8228830 req-ade876a6-fbd7-4dc8-81f3-5d6d127e54b9 service nova] Releasing lock "refresh_cache-a7835289-7896-491a-9c8a-df83f79fa457" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2400.089548] env[62813]: WARNING oslo_vmware.rw_handles [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2400.089548] env[62813]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2400.089548] env[62813]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2400.089548] env[62813]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2400.089548] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2400.089548] env[62813]: ERROR oslo_vmware.rw_handles response.begin() [ 2400.089548] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2400.089548] env[62813]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2400.089548] env[62813]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2400.089548] env[62813]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2400.089548] env[62813]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2400.089548] env[62813]: ERROR oslo_vmware.rw_handles [ 2400.090253] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 
tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Downloaded image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to vmware_temp/77db2000-06c3-40fb-9933-070fbb914fe2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2400.092258] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Caching image {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2400.092555] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Copying Virtual Disk [datastore2] vmware_temp/77db2000-06c3-40fb-9933-070fbb914fe2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk to [datastore2] vmware_temp/77db2000-06c3-40fb-9933-070fbb914fe2/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk {{(pid=62813) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2400.092883] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc50cc84-ea4f-4c39-bd8b-6b8beac0986a {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.102361] env[62813]: DEBUG oslo_vmware.api [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for the task: (returnval){ [ 2400.102361] env[62813]: value = "task-4267808" [ 2400.102361] env[62813]: _type = "Task" [ 2400.102361] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2400.111533] env[62813]: DEBUG oslo_vmware.api [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': task-4267808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2400.613436] env[62813]: DEBUG oslo_vmware.exceptions [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Fault InvalidArgument not matched. 
{{(pid=62813) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2400.613760] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2400.614359] env[62813]: ERROR nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2400.614359] env[62813]: Faults: ['InvalidArgument'] [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Traceback (most recent call last): [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] yield resources [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] self.driver.spawn(context, instance, image_meta, [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] self._fetch_image_if_missing(context, vi) [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] image_cache(vi, tmp_image_ds_loc) [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] vm_util.copy_virtual_disk( [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] session._wait_for_task(vmdk_copy_task) [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] return self.wait_for_task(task_ref) [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] return evt.wait() [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] result = hub.switch() [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] return self.greenlet.switch() [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] self.f(*self.args, **self.kw) [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] raise exceptions.translate_fault(task_info.error) [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Faults: ['InvalidArgument'] [ 2400.614359] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] [ 2400.615212] env[62813]: INFO nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Terminating instance [ 2400.616438] env[62813]: DEBUG oslo_concurrency.lockutils [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2400.616680] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2400.616937] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c08051b8-0288-4882-b7bf-d44a8fd44453 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.619353] env[62813]: DEBUG nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2400.619598] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2400.620414] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a00b7f7-01e4-43b9-a3b2-74ed13975f26 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.628296] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Unregistering the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2400.628490] env[62813]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c361c904-82ac-4094-8eac-98af6657d2e5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.631019] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2400.631209] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62813) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2400.632310] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c48f368f-4f8c-4a8c-97b3-678bb1596446 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.637893] env[62813]: DEBUG oslo_vmware.api [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Waiting for the task: (returnval){ [ 2400.637893] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]527276e0-c613-becb-50d1-db30a20b1958" [ 2400.637893] env[62813]: _type = "Task" [ 2400.637893] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2400.648020] env[62813]: DEBUG oslo_vmware.api [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]527276e0-c613-becb-50d1-db30a20b1958, 'name': SearchDatastore_Task} progress is 0%. 
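The spawn failure above surfaces as oslo_vmware.exceptions.VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) raised out of the task poller, and the compute manager reacts by terminating the instance: the VM is unregistered and its datastore directory deleted, as the following entries show. A compressed, illustrative sketch of that control flow; only the exception class is the real API, the callables are placeholders:

```python
from oslo_vmware import exceptions as vexc

def spawn_or_cleanup(copy_virtual_disk, destroy_instance):
    """copy_virtual_disk / destroy_instance are placeholder callables."""
    try:
        copy_virtual_disk()  # raises when the CopyVirtualDisk_Task ends in error
    except vexc.VimFaultException as exc:
        # Mirrors the log: spawn fails, so the instance is unregistered and its
        # datastore files are deleted before the error propagates.
        print("Instance failed to spawn: %s" % exc)
        destroy_instance()
        raise
```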
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2400.698509] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Unregistered the VM {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2400.698751] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Deleting contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2400.698889] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Deleting the datastore file [datastore2] 0dba8b6e-7927-432c-bd13-f5ce58f0c991 {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2400.699201] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d82278c0-6b4d-4f30-b27e-e69292cff2f4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.705782] env[62813]: DEBUG oslo_vmware.api [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for the task: (returnval){ [ 2400.705782] env[62813]: value = "task-4267810" [ 2400.705782] env[62813]: _type = "Task" [ 2400.705782] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2400.713998] env[62813]: DEBUG oslo_vmware.api [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': task-4267810, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2401.149907] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Preparing fetch location {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2401.150251] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Creating directory with path [datastore2] vmware_temp/767a320e-dde7-4b34-a421-1070c8e1c633/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2401.150460] env[62813]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7181726-c480-48e5-a976-da20d0c00f7d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.164430] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Created directory with path [datastore2] vmware_temp/767a320e-dde7-4b34-a421-1070c8e1c633/f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2401.164641] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Fetch image to [datastore2] vmware_temp/767a320e-dde7-4b34-a421-1070c8e1c633/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2401.164793] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to [datastore2] vmware_temp/767a320e-dde7-4b34-a421-1070c8e1c633/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk on the data store datastore2 {{(pid=62813) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2401.165661] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe50f41-8324-498a-b468-8ad5abaabaed {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.173244] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffd2995-0bc1-4a36-8a17-83c2cb781400 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.183633] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cdc988d-d202-4464-93b6-7206f5397e13 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.218176] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a75d4c-6764-437d-b9a4-fb41a98c6e50 {{(pid=62813) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.227584] env[62813]: DEBUG oslo_vmware.api [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Task: {'id': task-4267810, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078879} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2401.228192] env[62813]: DEBUG nova.virt.vmwareapi.ds_util [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Deleted the datastore file {{(pid=62813) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2401.228389] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Deleted contents of the VM from datastore datastore2 {{(pid=62813) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2401.228569] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2401.228749] env[62813]: INFO nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Took 0.61 seconds to destroy the instance on the hypervisor. 
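The traceback above ends where oslo_vmware's wait_for_task re-raises translate_fault(task_info.error) as a VimFaultException, and the destroy path then waits on DeleteDatastoreFile_Task the same way. A minimal sketch of that call pattern from the caller's side, assuming a configured oslo.vmware session; the connection arguments, datacenter reference and datastore paths below are placeholders, not values from this deployment, and the VMwareAPISession argument names are assumptions to be checked against the installed release:

# Illustrative sketch only: drive a vCenter task through oslo.vmware and
# surface the same VimFaultException seen in the traceback above.
from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vexc


def copy_disk(session, dc_ref, source_path, dest_path):
    """Start a CopyVirtualDisk_Task and block until vCenter reports a result."""
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                              sourceName=source_path,
                              sourceDatacenter=dc_ref,
                              destName=dest_path,
                              destDatacenter=dc_ref)
    # wait_for_task polls TaskInfo and raises translate_fault(task_info.error)
    # on failure -- the exact path shown in the traceback above.
    return session.wait_for_task(task)


if __name__ == '__main__':
    # Placeholder credentials; argument names follow the oslo.vmware docs.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=3, task_poll_interval=1)
    try:
        copy_disk(session,
                  dc_ref=None,  # a real Datacenter ManagedObjectReference is required
                  source_path='[datastore2] tmp/src.vmdk',
                  dest_path='[datastore2] tmp/dst.vmdk')
    except vexc.VimFaultException as e:
        # e.fault_list carries the vSphere fault names, e.g. ['InvalidArgument'].
        print('task failed:', e.fault_list, e)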
[ 2401.230341] env[62813]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-49025da6-e250-45ba-ab70-aab6ca985f24 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.232496] env[62813]: DEBUG nova.compute.claims [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Aborting claim: {{(pid=62813) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2401.232677] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2401.232889] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2401.257030] env[62813]: DEBUG nova.virt.vmwareapi.images [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Downloading image file data f6ee7c32-a26c-4731-80b9-1e546ea30e47 to the data store datastore2 {{(pid=62813) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2401.320123] env[62813]: DEBUG oslo_vmware.rw_handles [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/767a320e-dde7-4b34-a421-1070c8e1c633/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62813) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2401.381057] env[62813]: DEBUG oslo_vmware.rw_handles [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Completed reading data from the image iterator. {{(pid=62813) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2401.381258] env[62813]: DEBUG oslo_vmware.rw_handles [None req-bbe30b90-e39d-44b5-9d3b-1f77e8ab34df tempest-ServerActionsTestJSON-398058233 tempest-ServerActionsTestJSON-398058233-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/767a320e-dde7-4b34-a421-1070c8e1c633/f6ee7c32-a26c-4731-80b9-1e546ea30e47/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62813) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2401.452147] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19617260-ae6e-43f1-a4bf-86452496a8cc {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.460479] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c584087a-e0ba-48d7-a769-b56d7ddf161e {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.494833] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6510ef4f-f915-4fef-88a3-117fc5c63c51 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.503379] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2717e272-177d-44dd-94db-c8d91d5d79a5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.518077] env[62813]: DEBUG nova.compute.provider_tree [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2401.529511] env[62813]: DEBUG nova.scheduler.client.report [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2401.545839] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.313s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2401.546495] env[62813]: ERROR nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2401.546495] env[62813]: Faults: ['InvalidArgument'] [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Traceback (most recent call last): [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 
0dba8b6e-7927-432c-bd13-f5ce58f0c991] self.driver.spawn(context, instance, image_meta, [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] self._fetch_image_if_missing(context, vi) [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] image_cache(vi, tmp_image_ds_loc) [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] vm_util.copy_virtual_disk( [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] session._wait_for_task(vmdk_copy_task) [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] return self.wait_for_task(task_ref) [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] return evt.wait() [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] result = hub.switch() [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] return self.greenlet.switch() [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] self.f(*self.args, **self.kw) [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] raise exceptions.translate_fault(task_info.error) [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Faults: ['InvalidArgument'] [ 2401.546495] env[62813]: ERROR nova.compute.manager [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] [ 2401.547337] env[62813]: DEBUG nova.compute.utils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] VimFaultException {{(pid=62813) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2401.549208] env[62813]: DEBUG nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Build of instance 0dba8b6e-7927-432c-bd13-f5ce58f0c991 was re-scheduled: A specified parameter was not correct: fileType [ 2401.549208] env[62813]: Faults: ['InvalidArgument'] {{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2401.549619] env[62813]: DEBUG nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Unplugging VIFs for instance {{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2401.549803] env[62813]: DEBUG nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62813) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2401.550181] env[62813]: DEBUG nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2401.550181] env[62813]: DEBUG nova.network.neutron [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2401.845220] env[62813]: DEBUG nova.network.neutron [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2401.857054] env[62813]: INFO nova.compute.manager [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Took 0.31 seconds to deallocate network for instance. [ 2401.959025] env[62813]: INFO nova.scheduler.client.report [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Deleted allocations for instance 0dba8b6e-7927-432c-bd13-f5ce58f0c991 [ 2401.981039] env[62813]: DEBUG oslo_concurrency.lockutils [None req-963e4cbb-e8d9-4e19-bca1-93f6c749c133 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 580.153s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2401.981228] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6f008691-ef89-416b-a4d9-d8ad7a53cdd1 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 384.703s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2401.981430] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6f008691-ef89-416b-a4d9-d8ad7a53cdd1 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2401.982240] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6f008691-ef89-416b-a4d9-d8ad7a53cdd1 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2401.983278] env[62813]: 
DEBUG oslo_concurrency.lockutils [None req-6f008691-ef89-416b-a4d9-d8ad7a53cdd1 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2401.985030] env[62813]: INFO nova.compute.manager [None req-6f008691-ef89-416b-a4d9-d8ad7a53cdd1 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Terminating instance [ 2401.986751] env[62813]: DEBUG nova.compute.manager [None req-6f008691-ef89-416b-a4d9-d8ad7a53cdd1 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Start destroying the instance on the hypervisor. {{(pid=62813) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2401.986951] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6f008691-ef89-416b-a4d9-d8ad7a53cdd1 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Destroying instance {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2401.987792] env[62813]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0168ea5d-5de8-4266-a2c4-ed5b2e929dbe {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.999314] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7e4c00-8670-48d3-b80c-e9f27ac723a5 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.031050] env[62813]: WARNING nova.virt.vmwareapi.vmops [None req-6f008691-ef89-416b-a4d9-d8ad7a53cdd1 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0dba8b6e-7927-432c-bd13-f5ce58f0c991 could not be found. [ 2402.031307] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-6f008691-ef89-416b-a4d9-d8ad7a53cdd1 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Instance destroyed {{(pid=62813) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2402.031646] env[62813]: INFO nova.compute.manager [None req-6f008691-ef89-416b-a4d9-d8ad7a53cdd1 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2402.031976] env[62813]: DEBUG oslo.service.loopingcall [None req-6f008691-ef89-416b-a4d9-d8ad7a53cdd1 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2402.032293] env[62813]: DEBUG nova.compute.manager [-] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Deallocating network for instance {{(pid=62813) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2402.032413] env[62813]: DEBUG nova.network.neutron [-] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] deallocate_for_instance() {{(pid=62813) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2402.081575] env[62813]: DEBUG nova.network.neutron [-] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Updating instance_info_cache with network_info: [] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2402.090525] env[62813]: INFO nova.compute.manager [-] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] Took 0.06 seconds to deallocate network for instance. [ 2402.190469] env[62813]: DEBUG oslo_concurrency.lockutils [None req-6f008691-ef89-416b-a4d9-d8ad7a53cdd1 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.209s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2402.191388] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 42.605s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2402.191580] env[62813]: INFO nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 0dba8b6e-7927-432c-bd13-f5ce58f0c991] During sync_power_state the instance has a pending task (deleting). Skip. 
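The lockutils lines above follow a fixed format ('Lock "..." acquired by "..." :: waited Ns' and 'Lock "..." "released" by "..." :: held Ns'), which makes the long waits and holds, such as the 580.153s hold and 384.703s wait on the instance lock, easy to extract. A small helper for pulling the worst hold and wait times out of a log like this one; the regexes assume only the phrasing visible here:

# Report the longest "held" and "waited" durations per lock name.
import re
import sys
from collections import defaultdict

ACQ = re.compile(r'Lock "(?P<lock>[^"]+)" acquired by "[^"]+" :: waited (?P<s>[\d.]+)s')
REL = re.compile(r'Lock "(?P<lock>[^"]+)" "released" by "[^"]+" :: held (?P<s>[\d.]+)s')


def scan(lines):
    waited = defaultdict(float)
    held = defaultdict(float)
    for line in lines:
        m = ACQ.search(line)
        if m:
            waited[m['lock']] = max(waited[m['lock']], float(m['s']))
        m = REL.search(line)
        if m:
            held[m['lock']] = max(held[m['lock']], float(m['s']))
    return waited, held


if __name__ == '__main__':
    waited, held = scan(sys.stdin)
    for lock in sorted(set(waited) | set(held)):
        print(f"{lock}: max waited {waited.get(lock, 0.0):.3f}s, "
              f"max held {held.get(lock, 0.0):.3f}s")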
[ 2402.191757] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "0dba8b6e-7927-432c-bd13-f5ce58f0c991" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2410.216106] env[62813]: DEBUG oslo_concurrency.lockutils [None req-1c8d2496-28db-4927-91f0-0ab20d2c9122 tempest-ServersTestJSON-661015703 tempest-ServersTestJSON-661015703-project-member] Acquiring lock "e7283591-30ac-4132-9b7f-d407a82e9b87" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2411.163900] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2412.164639] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2412.164985] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62813) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2415.164642] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2419.164876] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2419.904649] env[62813]: DEBUG oslo_concurrency.lockutils [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Acquiring lock "71ee7b9e-55fb-470e-95c5-1106f87720dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2419.904881] env[62813]: DEBUG oslo_concurrency.lockutils [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Lock "71ee7b9e-55fb-470e-95c5-1106f87720dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2419.917315] env[62813]: DEBUG nova.compute.manager [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Starting instance... 
{{(pid=62813) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2420.003593] env[62813]: DEBUG oslo_concurrency.lockutils [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2420.003856] env[62813]: DEBUG oslo_concurrency.lockutils [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2420.005350] env[62813]: INFO nova.compute.claims [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2420.176113] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03a03d4-186f-47f4-a3d5-e00b3fae090b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.185049] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52de0dc7-3929-40a6-9be6-c2829534de24 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.219328] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf89e500-4434-4291-b5a7-5cc1f7c6b8c3 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.227407] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21daed67-edc0-4932-bf7c-11eff74900de {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.241136] env[62813]: DEBUG nova.compute.provider_tree [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2420.250779] env[62813]: DEBUG nova.scheduler.client.report [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2420.265157] env[62813]: DEBUG oslo_concurrency.lockutils [None 
req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.261s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2420.265789] env[62813]: DEBUG nova.compute.manager [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Start building networks asynchronously for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2420.302475] env[62813]: DEBUG nova.compute.utils [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Using /dev/sd instead of None {{(pid=62813) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2420.303747] env[62813]: DEBUG nova.compute.manager [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Allocating IP information in the background. {{(pid=62813) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2420.303920] env[62813]: DEBUG nova.network.neutron [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] allocate_for_instance() {{(pid=62813) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2420.324496] env[62813]: DEBUG nova.compute.manager [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Start building block device mappings for instance. {{(pid=62813) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2420.374485] env[62813]: DEBUG nova.policy [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'caa20b701bf044fe9721834ec56938b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '09fd808e93b546f580f0089011d00ff7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62813) authorize /opt/stack/nova/nova/policy.py:203}} [ 2420.395558] env[62813]: DEBUG nova.compute.manager [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Start spawning the instance on the hypervisor. 
{{(pid=62813) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2420.422208] env[62813]: DEBUG nova.virt.hardware [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-07T07:59:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-07T07:59:24Z,direct_url=,disk_format='vmdk',id=f6ee7c32-a26c-4731-80b9-1e546ea30e47,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9ff9378f181e456fa241a7d30ef08cfa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-07T07:59:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2420.422474] env[62813]: DEBUG nova.virt.hardware [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Flavor limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2420.422637] env[62813]: DEBUG nova.virt.hardware [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Image limits 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2420.423059] env[62813]: DEBUG nova.virt.hardware [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Flavor pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2420.423263] env[62813]: DEBUG nova.virt.hardware [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Image pref 0:0:0 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2420.423473] env[62813]: DEBUG nova.virt.hardware [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62813) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2420.423690] env[62813]: DEBUG nova.virt.hardware [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2420.423875] env[62813]: DEBUG nova.virt.hardware [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2420.424076] env[62813]: DEBUG nova.virt.hardware [None 
req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Got 1 possible topologies {{(pid=62813) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2420.424251] env[62813]: DEBUG nova.virt.hardware [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2420.424429] env[62813]: DEBUG nova.virt.hardware [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62813) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2420.425636] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22718dc1-eeef-42be-be6a-8d583c737d71 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.434338] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6cb32e-df2c-4695-b3f9-53a33061efdf {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.821875] env[62813]: DEBUG nova.network.neutron [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Successfully created port: 1ef27b99-4f5d-496a-a533-c4e5b1867070 {{(pid=62813) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2421.164294] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2421.164438] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Starting heal instance info cache {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2421.164541] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Rebuilding the list of instances to heal {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2421.187112] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 769103f0-9ebd-4a7a-825f-bf7456cb6eb9] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2421.187383] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 926a846d-f902-4ec3-898e-439f10b4ee68] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2421.187510] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 366200bc-8852-45a3-be8b-016265dbfed1] Skipping network cache update for instance because it is Building. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2421.187679] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 10ce3bdd-0caa-47ff-bd11-90c038cc6be8] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2421.187868] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 364c3489-27d4-47c9-a447-7ca4af197f67] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2421.188040] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: e7283591-30ac-4132-9b7f-d407a82e9b87] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2421.188203] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: a7835289-7896-491a-9c8a-df83f79fa457] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2421.188359] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Skipping network cache update for instance because it is Building. {{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2421.188522] env[62813]: DEBUG nova.compute.manager [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Didn't find any instances for network info cache update. 
{{(pid=62813) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2421.494460] env[62813]: DEBUG nova.compute.manager [req-2b05528f-1b6e-44dc-b18b-206223e767c8 req-ae68f0b0-34ff-40dd-9d9b-77a22a426732 service nova] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Received event network-vif-plugged-1ef27b99-4f5d-496a-a533-c4e5b1867070 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2421.494719] env[62813]: DEBUG oslo_concurrency.lockutils [req-2b05528f-1b6e-44dc-b18b-206223e767c8 req-ae68f0b0-34ff-40dd-9d9b-77a22a426732 service nova] Acquiring lock "71ee7b9e-55fb-470e-95c5-1106f87720dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2421.494963] env[62813]: DEBUG oslo_concurrency.lockutils [req-2b05528f-1b6e-44dc-b18b-206223e767c8 req-ae68f0b0-34ff-40dd-9d9b-77a22a426732 service nova] Lock "71ee7b9e-55fb-470e-95c5-1106f87720dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2421.496491] env[62813]: DEBUG oslo_concurrency.lockutils [req-2b05528f-1b6e-44dc-b18b-206223e767c8 req-ae68f0b0-34ff-40dd-9d9b-77a22a426732 service nova] Lock "71ee7b9e-55fb-470e-95c5-1106f87720dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2421.496759] env[62813]: DEBUG nova.compute.manager [req-2b05528f-1b6e-44dc-b18b-206223e767c8 req-ae68f0b0-34ff-40dd-9d9b-77a22a426732 service nova] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] No waiting events found dispatching network-vif-plugged-1ef27b99-4f5d-496a-a533-c4e5b1867070 {{(pid=62813) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2421.497026] env[62813]: WARNING nova.compute.manager [req-2b05528f-1b6e-44dc-b18b-206223e767c8 req-ae68f0b0-34ff-40dd-9d9b-77a22a426732 service nova] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Received unexpected event network-vif-plugged-1ef27b99-4f5d-496a-a533-c4e5b1867070 for instance with vm_state building and task_state spawning. 
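The nova.virt.hardware lines above reduce the m1.nano flavor's single vCPU against limits of 65536 sockets, cores and threads to the one topology 1:1:1. A simplified sketch of that enumeration, assuming only what the log shows; Nova's real _get_possible_cpu_topologies applies additional flavor and image constraints:

# Enumerate (sockets, cores, threads) triples whose product equals vcpus,
# capped by the per-dimension limits logged above.
from typing import List, NamedTuple


class Topology(NamedTuple):
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus: int, max_sockets: int = 65536,
                        max_cores: int = 65536,
                        max_threads: int = 65536) -> List[Topology]:
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % s:
            continue
        for c in range(1, min(vcpus // s, max_cores) + 1):
            if (vcpus // s) % c:
                continue
            t = vcpus // (s * c)
            if t <= max_threads:
                found.append(Topology(s, c, t))
    return found


# For vcpus=1 this yields [Topology(sockets=1, cores=1, threads=1)],
# matching the "Possible topologies" line in the log.
print(possible_topologies(1))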
[ 2421.581603] env[62813]: DEBUG nova.network.neutron [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Successfully updated port: 1ef27b99-4f5d-496a-a533-c4e5b1867070 {{(pid=62813) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2421.591245] env[62813]: DEBUG oslo_concurrency.lockutils [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Acquiring lock "refresh_cache-71ee7b9e-55fb-470e-95c5-1106f87720dc" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2421.591448] env[62813]: DEBUG oslo_concurrency.lockutils [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Acquired lock "refresh_cache-71ee7b9e-55fb-470e-95c5-1106f87720dc" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2421.591569] env[62813]: DEBUG nova.network.neutron [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Building network info cache for instance {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2421.641113] env[62813]: DEBUG nova.network.neutron [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Instance cache missing network info. 
{{(pid=62813) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2421.820518] env[62813]: DEBUG nova.network.neutron [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Updating instance_info_cache with network_info: [{"id": "1ef27b99-4f5d-496a-a533-c4e5b1867070", "address": "fa:16:3e:57:b4:74", "network": {"id": "73ffc08f-76cb-4303-bff5-893b21dd1d58", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1929892940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09fd808e93b546f580f0089011d00ff7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ef27b99-4f", "ovs_interfaceid": "1ef27b99-4f5d-496a-a533-c4e5b1867070", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2421.834760] env[62813]: DEBUG oslo_concurrency.lockutils [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Releasing lock "refresh_cache-71ee7b9e-55fb-470e-95c5-1106f87720dc" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2421.835428] env[62813]: DEBUG nova.compute.manager [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Instance network_info: |[{"id": "1ef27b99-4f5d-496a-a533-c4e5b1867070", "address": "fa:16:3e:57:b4:74", "network": {"id": "73ffc08f-76cb-4303-bff5-893b21dd1d58", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1929892940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09fd808e93b546f580f0089011d00ff7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ef27b99-4f", "ovs_interfaceid": "1ef27b99-4f5d-496a-a533-c4e5b1867070", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62813) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2421.836288] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:b4:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ef27b99-4f5d-496a-a533-c4e5b1867070', 'vif_model': 'vmxnet3'}] {{(pid=62813) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2421.843877] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Creating folder: Project (09fd808e93b546f580f0089011d00ff7). Parent ref: group-v840812. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2421.844537] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-247c2f34-8aaf-4989-8dba-3e61a18e12b9 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.855819] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Created folder: Project (09fd808e93b546f580f0089011d00ff7) in parent group-v840812. [ 2421.856136] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Creating folder: Instances. Parent ref: group-v840937. {{(pid=62813) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2421.856429] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31d16d3b-3395-4a63-adb0-ec85a41e99f4 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.866883] env[62813]: INFO nova.virt.vmwareapi.vm_util [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Created folder: Instances in parent group-v840937. [ 2421.866883] env[62813]: DEBUG oslo.service.loopingcall [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62813) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2421.867167] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Creating VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2421.867581] env[62813]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1fe2faba-ef22-407a-8186-e40d08ebe5f2 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2421.901056] env[62813]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2421.901056] env[62813]: value = "task-4267813" [ 2421.901056] env[62813]: _type = "Task" [ 2421.901056] env[62813]: } to complete. 
{{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2421.909644] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267813, 'name': CreateVM_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2422.164334] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2422.164582] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2422.177098] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2422.177566] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2422.177808] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2422.178106] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62813) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2422.179831] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b2bfaa-db8e-470e-b3ec-bc06061c9d4c {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.189402] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291ce539-0221-4877-a9a1-37e071837d22 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.206570] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcfc5c60-f31c-4d3d-9b7f-1ce44bbf1432 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.214697] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a2f6aa-33f3-4633-bdeb-ed1957c86094 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.259860] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180721MB free_disk=222GB free_vcpus=48 pci_devices=None {{(pid=62813) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2422.260035] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2422.260260] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2422.334547] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 769103f0-9ebd-4a7a-825f-bf7456cb6eb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2422.334811] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 926a846d-f902-4ec3-898e-439f10b4ee68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2422.334974] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 366200bc-8852-45a3-be8b-016265dbfed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2422.335114] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 10ce3bdd-0caa-47ff-bd11-90c038cc6be8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2422.335241] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 364c3489-27d4-47c9-a447-7ca4af197f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2422.335364] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance e7283591-30ac-4132-9b7f-d407a82e9b87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2422.335486] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance a7835289-7896-491a-9c8a-df83f79fa457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2422.335672] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Instance 71ee7b9e-55fb-470e-95c5-1106f87720dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62813) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2422.335918] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2422.336088] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=225GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62813) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2422.412122] env[62813]: DEBUG oslo_vmware.api [-] Task: {'id': task-4267813, 'name': CreateVM_Task, 'duration_secs': 0.34708} completed successfully. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2422.412319] env[62813]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Created VM on the ESX host {{(pid=62813) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2422.413213] env[62813]: DEBUG oslo_concurrency.lockutils [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2422.413434] env[62813]: DEBUG oslo_concurrency.lockutils [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2422.413765] env[62813]: DEBUG oslo_concurrency.lockutils [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2422.414031] env[62813]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ea12349-4fd0-462f-ac0e-60809cddbc27 {{(pid=62813) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.421397] env[62813]: DEBUG oslo_vmware.api [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Waiting for the task: (returnval){ [ 2422.421397] env[62813]: value = "session[5271f045-2207-6645-84f1-ada5de9b034a]524814eb-1d60-95bf-9b40-b6c720f912fa" [ 2422.421397] env[62813]: _type = "Task" [ 2422.421397] env[62813]: } to complete. {{(pid=62813) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2422.430204] env[62813]: DEBUG oslo_vmware.api [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Task: {'id': session[5271f045-2207-6645-84f1-ada5de9b034a]524814eb-1d60-95bf-9b40-b6c720f912fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62813) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2422.455386] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7691a27-0037-4356-8314-83929c01b33d {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.463168] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915d420f-8015-48c8-a031-705caca7a354 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.493270] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4926343b-5a62-48dc-90a8-96decfa2c26b {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.500814] env[62813]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5fb732-a6c1-46c1-b964-514150053f75 {{(pid=62813) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2422.515564] env[62813]: DEBUG nova.compute.provider_tree [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed in ProviderTree for provider: 49efdf20-78bc-435f-a902-9cc99ed395f2 {{(pid=62813) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2422.524243] env[62813]: DEBUG nova.scheduler.client.report [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Inventory has not changed for provider 49efdf20-78bc-435f-a902-9cc99ed395f2 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 405, 'reserved': 0, 'min_unit': 1, 'max_unit': 222, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62813) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2422.541399] env[62813]: DEBUG nova.compute.resource_tracker [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62813) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2422.541590] env[62813]: DEBUG oslo_concurrency.lockutils [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.281s {{(pid=62813) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2422.933814] env[62813]: DEBUG oslo_concurrency.lockutils [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2422.934109] env[62813]: DEBUG nova.virt.vmwareapi.vmops [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Processing image f6ee7c32-a26c-4731-80b9-1e546ea30e47 {{(pid=62813) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2422.934330] env[62813]: DEBUG oslo_concurrency.lockutils [None req-dd023acf-8a07-4d32-a254-05d03835193d tempest-ServerPasswordTestJSON-215563873 tempest-ServerPasswordTestJSON-215563873-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f6ee7c32-a26c-4731-80b9-1e546ea30e47/f6ee7c32-a26c-4731-80b9-1e546ea30e47.vmdk" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2423.526799] env[62813]: DEBUG nova.compute.manager [req-ae8a6fad-c62f-4223-bfb4-7bbd6b61db9b req-b7382c56-62c4-4134-99c4-354d038311e6 service nova] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Received event network-changed-1ef27b99-4f5d-496a-a533-c4e5b1867070 {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2423.527057] env[62813]: DEBUG nova.compute.manager [req-ae8a6fad-c62f-4223-bfb4-7bbd6b61db9b req-b7382c56-62c4-4134-99c4-354d038311e6 service nova] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Refreshing instance network info cache due to event network-changed-1ef27b99-4f5d-496a-a533-c4e5b1867070. {{(pid=62813) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2423.527129] env[62813]: DEBUG oslo_concurrency.lockutils [req-ae8a6fad-c62f-4223-bfb4-7bbd6b61db9b req-b7382c56-62c4-4134-99c4-354d038311e6 service nova] Acquiring lock "refresh_cache-71ee7b9e-55fb-470e-95c5-1106f87720dc" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2423.527260] env[62813]: DEBUG oslo_concurrency.lockutils [req-ae8a6fad-c62f-4223-bfb4-7bbd6b61db9b req-b7382c56-62c4-4134-99c4-354d038311e6 service nova] Acquired lock "refresh_cache-71ee7b9e-55fb-470e-95c5-1106f87720dc" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2423.527431] env[62813]: DEBUG nova.network.neutron [req-ae8a6fad-c62f-4223-bfb4-7bbd6b61db9b req-b7382c56-62c4-4134-99c4-354d038311e6 service nova] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Refreshing network info cache for port 1ef27b99-4f5d-496a-a533-c4e5b1867070 {{(pid=62813) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2423.813459] env[62813]: DEBUG nova.network.neutron [req-ae8a6fad-c62f-4223-bfb4-7bbd6b61db9b req-b7382c56-62c4-4134-99c4-354d038311e6 service nova] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Updated VIF entry in instance network info cache for port 1ef27b99-4f5d-496a-a533-c4e5b1867070. 
{{(pid=62813) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2423.813872] env[62813]: DEBUG nova.network.neutron [req-ae8a6fad-c62f-4223-bfb4-7bbd6b61db9b req-b7382c56-62c4-4134-99c4-354d038311e6 service nova] [instance: 71ee7b9e-55fb-470e-95c5-1106f87720dc] Updating instance_info_cache with network_info: [{"id": "1ef27b99-4f5d-496a-a533-c4e5b1867070", "address": "fa:16:3e:57:b4:74", "network": {"id": "73ffc08f-76cb-4303-bff5-893b21dd1d58", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1929892940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09fd808e93b546f580f0089011d00ff7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ef27b99-4f", "ovs_interfaceid": "1ef27b99-4f5d-496a-a533-c4e5b1867070", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62813) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2423.825732] env[62813]: DEBUG oslo_concurrency.lockutils [req-ae8a6fad-c62f-4223-bfb4-7bbd6b61db9b req-b7382c56-62c4-4134-99c4-354d038311e6 service nova] Releasing lock "refresh_cache-71ee7b9e-55fb-470e-95c5-1106f87720dc" {{(pid=62813) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2425.541410] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2427.160823] env[62813]: DEBUG oslo_service.periodic_task [None req-61db1b0d-44bc-4d4d-97b1-bbb8a0a41d02 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62813) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
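For anyone tracing entries such as the two instance_info_cache updates above, the embedded network_info payload is easier to read once it is pulled out of the log line. What follows is a minimal, illustrative Python sketch, not Nova code: the field names (id, address, devname, network.subnets[].ips) mirror the logged payload, and the sample blob is a trimmed, hypothetical copy of the structure recorded for port 1ef27b99-4f5d-496a-a533-c4e5b1867070.

import json

# Trimmed, hypothetical sample of the network_info structure that
# update_instance_cache_with_nw_info logs above.
SAMPLE_NETWORK_INFO = '''
[{"id": "1ef27b99-4f5d-496a-a533-c4e5b1867070",
  "address": "fa:16:3e:57:b4:74",
  "devname": "tap1ef27b99-4f",
  "active": true,
  "network": {"id": "73ffc08f-76cb-4303-bff5-893b21dd1d58",
              "bridge": "br-int",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.12",
                                    "type": "fixed",
                                    "version": 4}]}]}}]
'''

def summarize_vifs(network_info_json):
    """Return (port_id, mac, devname, fixed_ips) for each VIF in the blob."""
    summary = []
    for vif in json.loads(network_info_json):
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]
                     if ip.get("type") == "fixed"]
        summary.append((vif["id"], vif["address"], vif["devname"], fixed_ips))
    return summary

if __name__ == "__main__":
    for port_id, mac, devname, ips in summarize_vifs(SAMPLE_NETWORK_INFO):
        print(f"port {port_id}  mac {mac}  dev {devname}  fixed {ips}")

Run against the sample blob, this prints the port UUID, MAC fa:16:3e:57:b4:74, tap device tap1ef27b99-4f and fixed IP 192.168.128.12, which is usually enough to correlate the CreateVM_Task and the network-changed-1ef27b99-4f5d-496a-a533-c4e5b1867070 event that appear later in this section of the log.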