[ 495.023848] env[62507]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62507) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 495.024265] env[62507]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62507) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 495.024265] env[62507]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62507) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 495.024590] env[62507]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 495.113928] env[62507]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62507) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 495.124019] env[62507]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62507) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 495.263183] env[62507]: INFO nova.virt.driver [None req-b6ac9dc5-c6f4-4c57-95b3-acff62c1b118 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 495.334967] env[62507]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 495.335133] env[62507]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 495.335242] env[62507]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62507) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 498.391649] env[62507]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-871bc70b-bdbd-4157-bfe4-1fa492b8f3e2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.407420] env[62507]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62507) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 498.407621] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-e642fd9d-fcb0-45da-bf3c-3d7128324011 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.435342] env[62507]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 6d8ed.
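The "Acquiring lock" / "acquired" lines above are emitted by oslo.concurrency's lockutils wrapper around the locked callable. As a minimal sketch of the same pattern (assuming only the oslo.concurrency package; the lock name and function body are illustrative, not nova's actual code):

    # Sketch: serialize a code path with oslo.concurrency, which logs the
    # "Acquiring lock ..." / "acquired" / "released" DEBUG lines seen above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('oslo_vmware_api_lock')
    def create_session():
        # Only one thread/greenthread at a time runs this body.
        return 'session'

    # The same named lock is also available as a context manager:
    with lockutils.lock('some_other_lock'):
        pass  # critical section
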
[ 498.435515] env[62507]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.100s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 498.436065] env[62507]: INFO nova.virt.vmwareapi.driver [None req-b6ac9dc5-c6f4-4c57-95b3-acff62c1b118 None None] VMware vCenter version: 7.0.3
[ 498.439517] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0220a8-c384-448d-8280-d8fd0664a9f1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.460141] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3570ff-ae66-42b9-a05e-8289db2f6a64 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.465803] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7faae4d3-c97d-41f4-b08f-fac92282e6a5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.472125] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb59bd2b-2af3-4629-9cc1-692cff7f1d06 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.484776] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20dd509-a2be-4596-91d3-eea2f000c2e9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.490648] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed58e9e-a939-4a78-9784-993420f15f93 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.521112] env[62507]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-10cb36b1-aa9e-4bf0-9a11-79ca6d41b453 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.526228] env[62507]: DEBUG nova.virt.vmwareapi.driver [None req-b6ac9dc5-c6f4-4c57-95b3-acff62c1b118 None None] Extension org.openstack.compute already exists. {{(pid=62507) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 498.528826] env[62507]: INFO nova.compute.provider_config [None req-b6ac9dc5-c6f4-4c57-95b3-acff62c1b118 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
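The SessionManager.Login and repeated PropertyCollector.RetrievePropertiesEx invocations above go through oslo.vmware. A rough sketch of how such a session is created and queried (assuming the oslo.vmware package; the host, credentials, and queried object type are placeholders, not values from this deployment):

    # Sketch: establish a vCenter session and issue one vSphere API call.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials; nova reads the real values
    # from the [vmware] section of nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc1.example.com', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Each call like this is logged as "Invoking <Manager>.<Method> with opID=...".
    result = session.invoke_api(vim_util, 'get_objects',
                                session.vim, 'HostSystem', 100)
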
[ 498.551305] env[62507]: DEBUG nova.context [None req-b6ac9dc5-c6f4-4c57-95b3-acff62c1b118 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),3e79e424-25e6-41de-8ead-6c54447543b7(cell1) {{(pid=62507) load_cells /opt/stack/nova/nova/context.py:464}}
[ 498.553322] env[62507]: DEBUG oslo_concurrency.lockutils [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 498.553550] env[62507]: DEBUG oslo_concurrency.lockutils [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 498.554260] env[62507]: DEBUG oslo_concurrency.lockutils [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 498.554667] env[62507]: DEBUG oslo_concurrency.lockutils [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Acquiring lock "3e79e424-25e6-41de-8ead-6c54447543b7" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 498.554855] env[62507]: DEBUG oslo_concurrency.lockutils [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Lock "3e79e424-25e6-41de-8ead-6c54447543b7" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 498.555836] env[62507]: DEBUG oslo_concurrency.lockutils [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Lock "3e79e424-25e6-41de-8ead-6c54447543b7" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 498.576609] env[62507]: INFO dbcounter [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Registered counter for database nova_cell0
[ 498.584987] env[62507]: INFO dbcounter [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Registered counter for database nova_cell1
[ 498.588113] env[62507]: DEBUG oslo_db.sqlalchemy.engines [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62507) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 498.588397] env[62507]: DEBUG oslo_db.sqlalchemy.engines [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62507) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 498.592894] env[62507]: DEBUG dbcounter [-] [62507] Writer thread running {{(pid=62507) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 498.593749] env[62507]: DEBUG dbcounter [-] [62507] Writer thread running {{(pid=62507) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 498.595827] env[62507]: ERROR nova.db.main.api [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 498.595827] env[62507]: result = function(*args, **kwargs)
[ 498.595827] env[62507]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 498.595827] env[62507]: return func(*args, **kwargs)
[ 498.595827] env[62507]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 498.595827] env[62507]: result = fn(*args, **kwargs)
[ 498.595827] env[62507]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 498.595827] env[62507]: return f(*args, **kwargs)
[ 498.595827] env[62507]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 498.595827] env[62507]: return db.service_get_minimum_version(context, binaries)
[ 498.595827] env[62507]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 498.595827] env[62507]: _check_db_access()
[ 498.595827] env[62507]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 498.595827] env[62507]: stacktrace = ''.join(traceback.format_stack())
[ 498.595827] env[62507]:
[ 498.596819] env[62507]: ERROR nova.db.main.api [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 498.596819] env[62507]: result = function(*args, **kwargs)
[ 498.596819] env[62507]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 498.596819] env[62507]: return func(*args, **kwargs)
[ 498.596819] env[62507]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 498.596819] env[62507]: result = fn(*args, **kwargs)
[ 498.596819] env[62507]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 498.596819] env[62507]: return f(*args, **kwargs)
[ 498.596819] env[62507]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 498.596819] env[62507]: return db.service_get_minimum_version(context, binaries)
[ 498.596819] env[62507]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 498.596819] env[62507]: _check_db_access()
[ 498.596819] env[62507]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 498.596819] env[62507]: stacktrace = ''.join(traceback.format_stack())
[ 498.596819] env[62507]:
[ 498.597414] env[62507]: WARNING nova.objects.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Failed to get minimum service version for cell 3e79e424-25e6-41de-8ead-6c54447543b7
[ 498.597414] env[62507]: WARNING nova.objects.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
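The two ERROR blocks above show nova's guard against direct database access from the compute service: nova/db/main/api.py captures the offending call stack and refuses the query, and the caller downgrades the failure to the "Failed to get minimum service version" WARNINGs that follow. A simplified sketch of that guard, reconstructed from the frames visible in the trace (names abbreviated; not a verbatim copy of nova's code):

    # Sketch: refuse DB calls from a service that must stay DB-less.
    import logging
    import traceback

    LOG = logging.getLogger(__name__)
    DISABLE_DB_ACCESS = True  # nova-compute enables this at startup

    class DBNotAllowed(Exception):
        """Raised when a direct DB call is attempted from nova-compute."""

    def _check_db_access():
        if DISABLE_DB_ACCESS:
            # Log the full call path (the ERROR seen above), then raise
            # so the caller can fall back gracefully instead of querying.
            stacktrace = ''.join(traceback.format_stack())
            LOG.error('No DB access allowed in nova-compute: %s', stacktrace)
            raise DBNotAllowed('nova-compute')
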
[ 498.597819] env[62507]: DEBUG oslo_concurrency.lockutils [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Acquiring lock "singleton_lock" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 498.597985] env[62507]: DEBUG oslo_concurrency.lockutils [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Acquired lock "singleton_lock" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 498.598251] env[62507]: DEBUG oslo_concurrency.lockutils [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Releasing lock "singleton_lock" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 498.598572] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Full set of CONF: {{(pid=62507) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 498.598719] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ******************************************************************************** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 498.598849] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] Configuration options gathered from: {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 498.598984] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 498.599192] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 498.599324] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ================================================================================ {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 498.599540] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] allow_resize_to_same_host = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.599710] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] arq_binding_timeout = 300 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.599844] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] backdoor_port = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.599973] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] backdoor_socket = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.600153] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] block_device_allocate_retries = 60 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.600320] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] block_device_allocate_retries_interval = 3 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.600494] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cert = self.pem {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.600667] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.600842] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute_monitors = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.601024] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] config_dir = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.601206] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] config_drive_format = iso9660 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.601341] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.601537] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] config_source = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.601722] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] console_host = devstack {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.601894] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] control_exchange = nova {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.602074] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cpu_allocation_ratio = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.602243] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] daemon = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.602422] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] debug = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.602623] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] default_access_ip_network_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.602799] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] default_availability_zone = nova {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.602960] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] default_ephemeral_format = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.603142] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] default_green_pool_size = 1000 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.603379] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.603546] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] default_schedule_zone = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.603714] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] disk_allocation_ratio = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.603912] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] enable_new_services = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.604115] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] enabled_apis = ['osapi_compute'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.604289] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] enabled_ssl_apis = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.604455] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] flat_injected = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.604618] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] force_config_drive = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.604782] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] force_raw_images = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.604951] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] graceful_shutdown_timeout = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.605127] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] heal_instance_info_cache_interval = 60 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.605342] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] host = cpu-1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.605524] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.605696] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.605860] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.606085] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.606258] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] instance_build_timeout = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.606425] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] instance_delete_interval = 300 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.606600] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] instance_format = [instance: %(uuid)s] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.606771] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] instance_name_template = instance-%08x {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.606937] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] instance_usage_audit = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.607127] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] instance_usage_audit_period = month {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.607300] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.607469] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.607641] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] internal_service_availability_zone = internal {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.607804] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] key = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.607969] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] live_migration_retry_count = 30 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.608151] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] log_config_append = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.608327] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.608492] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] log_dir = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.608658] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] log_file = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.608791] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] log_options = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.608955] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] log_rotate_interval = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.609140] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] log_rotate_interval_type = days {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.609312] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] log_rotation_type = none {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.609448] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.609580] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.609752] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.609924] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.610068] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.610240] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] long_rpc_timeout = 1800 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.610407] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] max_concurrent_builds = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.610571] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] max_concurrent_live_migrations = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.610734] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] max_concurrent_snapshots = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.610899] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] max_local_block_devices = 3 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.611074] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] max_logfile_count = 30 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.611241] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] max_logfile_size_mb = 200 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.611406] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] maximum_instance_delete_attempts = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.611600] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] metadata_listen = 0.0.0.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.611781] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] metadata_listen_port = 8775 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.611955] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] metadata_workers = 2 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.612137] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] migrate_max_retries = -1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.612311] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] mkisofs_cmd = genisoimage {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.612587] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.612746] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] my_ip = 10.180.1.21 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.612924] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] network_allocate_retries = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.613130] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.613309] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.613480] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] osapi_compute_listen_port = 8774 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.613655] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] osapi_compute_unique_server_name_scope = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.613860] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] osapi_compute_workers = 2 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.614052] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] password_length = 12 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.614229] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] periodic_enable = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.614396] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] periodic_fuzzy_delay = 60 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.614570] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] pointer_model = usbtablet {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.614744] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] preallocate_images = none {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.614912] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] publish_errors = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.615056] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] pybasedir = /opt/stack/nova {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.615224] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ram_allocation_ratio = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.615390] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] rate_limit_burst = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.615560] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] rate_limit_except_level = CRITICAL {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.615724] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] rate_limit_interval = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.615886] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] reboot_timeout = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.616060] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] reclaim_instance_interval = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.616227] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] record = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.616399] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] reimage_timeout_per_gb = 60 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.616568] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] report_interval = 120 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.616736] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] rescue_timeout = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.616899] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] reserved_host_cpus = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.617075] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] reserved_host_disk_mb = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.617244] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] reserved_host_memory_mb = 512 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.617409] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] reserved_huge_pages = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.617575] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] resize_confirm_window = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.617738] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] resize_fs_using_block_device = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.617903] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] resume_guests_state_on_host_boot = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.618095] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.618264] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] rpc_response_timeout = 60 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.618431] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] run_external_periodic_tasks = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.618609] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] running_deleted_instance_action = reap {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.618774] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.618939] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] running_deleted_instance_timeout = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.619116] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] scheduler_instance_sync_interval = 120 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.619291] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] service_down_time = 720 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.619466] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] servicegroup_driver = db {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.619633] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] shelved_offload_time = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.619803] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] shelved_poll_interval = 3600 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.620048] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] shutdown_timeout = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.620238] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] source_is_ipv6 = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.620408] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ssl_only = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.620655] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.620832] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] sync_power_state_interval = 600 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.621011] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] sync_power_state_pool_size = 1000 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.621193] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] syslog_log_facility = LOG_USER {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.621364] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] tempdir = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.621568] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] timeout_nbd = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.621750] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] transport_url = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.621919] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] update_resources_interval = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.622100] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] use_cow_images = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.622335] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] use_eventlog = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.622528] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] use_journal = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.622697] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] use_json = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.622864] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] use_rootwrap_daemon = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.623040] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] use_stderr = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.623211] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] use_syslog = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.623376] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vcpu_pin_set = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.623548] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plugging_is_fatal = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.623727] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plugging_timeout = 300 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.623897] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] virt_mkfs = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.624473] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] volume_usage_poll_interval = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.624473] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] watch_log_file = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.624473] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] web = /usr/share/spice-html5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.624601] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_concurrency.disable_process_locking = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.624905] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.625107] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.625284] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.625463] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.625641] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.625812] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.625999] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.auth_strategy = keystone {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.626189] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.compute_link_prefix = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.626370] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.626548] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.dhcp_domain = novalocal {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.626724] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.enable_instance_password = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.626892] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.glance_link_prefix = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.627074] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.627254] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.627422] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.instance_list_per_project_cells = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.627589] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.list_records_by_skipping_down_cells = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.627756] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.local_metadata_per_cell = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.627925] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.max_limit = 1000 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.628107] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.metadata_cache_expiration = 15 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.628296] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.neutron_default_tenant_id = default {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.628470] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.use_neutron_default_nets = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.628643] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.628807] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.628976] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.629166] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.629341] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.vendordata_dynamic_targets = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.629513] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.vendordata_jsonfile_path = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.629700] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.629893] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.backend = dogpile.cache.memcached {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.630075] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.backend_argument = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.630251] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.config_prefix = cache.oslo {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.630424] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.dead_timeout = 60.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.630594] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.debug_cache_backend = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.630760] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.enable_retry_client = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.630924] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.enable_socket_keepalive = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.631107] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.enabled = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.631276] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.enforce_fips_mode = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.631465] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.expiration_time = 600 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.631648] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.hashclient_retry_attempts = 2 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.631819] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.631985] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.memcache_dead_retry = 300 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.632164] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.memcache_password = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.632331] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.632530] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.632711] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.memcache_pool_maxsize = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.632881] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.633063] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.memcache_sasl_enabled = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.633254] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.633429] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.633597] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.memcache_username = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.633768] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.proxies = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.633930] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.redis_password = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.634117] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.634299] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.634470] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.redis_server = localhost:6379 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.634639] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.redis_socket_timeout = 1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.634799] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.redis_username = None {{(pid=62507) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.634962] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.retry_attempts = 2 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.635145] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.retry_delay = 0.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.635312] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.socket_keepalive_count = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.635476] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.socket_keepalive_idle = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.635640] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.socket_keepalive_interval = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.635799] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.tls_allowed_ciphers = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.635959] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.tls_cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.636134] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.tls_certfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.636299] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.tls_enabled = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.636460] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cache.tls_keyfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.636634] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.auth_section = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.636808] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.auth_type = password {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.636972] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.637164] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.637327] env[62507]: DEBUG oslo_service.service 
[None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.certfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.637495] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.collect_timing = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.637663] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.cross_az_attach = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.637827] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.debug = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.637990] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.endpoint_template = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.638175] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.http_retries = 3 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.638342] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.insecure = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.638504] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.keyfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.638680] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.os_region_name = RegionOne {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.638845] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.split_loggers = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.639015] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cinder.timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.639198] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.639364] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute.cpu_dedicated_set = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.639525] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute.cpu_shared_set = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.639694] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute.image_type_exclude_list = [] {{(pid=62507) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.639861] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.640037] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.640208] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.640375] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.640550] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.640718] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute.resource_provider_association_refresh = 300 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.640885] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute.shutdown_retry_interval = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.641083] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.641266] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] conductor.workers = 2 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.641469] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] console.allowed_origins = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.641650] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] console.ssl_ciphers = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.641828] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] console.ssl_minimum_version = default {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.642008] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] consoleauth.enforce_session_timeout = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.642193] env[62507]: DEBUG oslo_service.service [None 
req-85d40937-7886-4948-8223-bef26dca53c4 None None] consoleauth.token_ttl = 600 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.642365] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.642555] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.certfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.642732] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.collect_timing = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.642898] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.connect_retries = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.643074] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.connect_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.643244] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.endpoint_override = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.643414] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.insecure = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.643581] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.keyfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.643767] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.max_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.643950] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.min_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.644130] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.region_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.644298] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.retriable_status_codes = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.644463] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.service_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.644640] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.service_type = accelerator {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.644809] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.split_loggers = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.644972] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.status_code_retries = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.645152] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.status_code_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.645317] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.645502] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.645668] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] cyborg.version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.645851] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.backend = sqlalchemy {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.646037] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.connection = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.646215] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.connection_debug = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.646390] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.connection_parameters = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.646561] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.connection_recycle_time = 3600 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.646728] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.connection_trace = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.646893] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.db_inc_retry_interval = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.647072] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.db_max_retries = 20 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
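Every record in the dump above is produced by oslo.config's ConfigOpts.log_opt_values() (the cfg.py:2620 frame cited in each entry), which walks every registered option group at service startup and writes one DEBUG record per option, masking any option registered with secret=True as '****' (hence database.connection = **** and database.slave_connection = **** above). A minimal, self-contained sketch of that mechanism, using a few database.* options whose values mirror the dump; the registration code here is illustrative, not Nova's actual startup path:

import logging

from oslo_config import cfg

LOG = logging.getLogger(__name__)
CONF = cfg.CONF

database_group = cfg.OptGroup(name='database')
database_opts = [
    # secret=True is why the dump shows 'database.connection = ****'
    cfg.StrOpt('connection', secret=True),
    cfg.IntOpt('max_pool_size', default=5),
    cfg.IntOpt('max_overflow', default=50),
    cfg.IntOpt('connection_recycle_time', default=3600),
]

CONF.register_group(database_group)
CONF.register_opts(database_opts, group=database_group)

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    CONF([])  # parse an empty command line; config files are optional here
    # Emits one DEBUG line per registered option, e.g.
    # 'database.connection = ****', 'database.max_pool_size = 5',
    # in the same form as the dump above.
    CONF.log_opt_values(LOG, logging.DEBUG)
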
[ 498.647243] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.db_max_retry_interval = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.647410] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.db_retry_interval = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.647577] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.max_overflow = 50 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.647744] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.max_pool_size = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.647905] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.max_retries = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.648089] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.649621] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.mysql_wsrep_sync_wait = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.649621] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.pool_timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.649621] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.retry_interval = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.649621] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.slave_connection = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.649621] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.sqlite_synchronous = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.649621] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] database.use_db_reconnect = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.649871] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.backend = sqlalchemy {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.649871] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.connection = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.649871] env[62507]: DEBUG oslo_service.service [None 
req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.connection_debug = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.649871] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.connection_parameters = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.649871] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.connection_recycle_time = 3600 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.650028] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.connection_trace = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.650181] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.db_inc_retry_interval = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.650349] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.db_max_retries = 20 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.650515] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.db_max_retry_interval = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.650680] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.db_retry_interval = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.650844] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.max_overflow = 50 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.651014] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.max_pool_size = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.651185] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.max_retries = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.651355] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.651548] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.651721] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.pool_timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.651888] env[62507]: DEBUG oslo_service.service [None 
req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.retry_interval = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.652063] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.slave_connection = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.652233] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] api_database.sqlite_synchronous = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.652416] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] devices.enabled_mdev_types = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.652592] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.652763] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ephemeral_storage_encryption.default_format = luks {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.652927] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ephemeral_storage_encryption.enabled = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.653106] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.653281] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.api_servers = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.653450] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.653616] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.certfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.653807] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.collect_timing = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.653984] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.connect_retries = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.654165] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.connect_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.654333] env[62507]: DEBUG oslo_service.service [None 
req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.debug = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.654503] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.default_trusted_certificate_ids = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.654670] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.enable_certificate_validation = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.654833] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.enable_rbd_download = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.654994] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.endpoint_override = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.655178] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.insecure = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.655343] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.keyfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.655504] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.max_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.655669] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.min_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.655829] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.num_retries = 3 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.655999] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.rbd_ceph_conf = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.656179] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.rbd_connect_timeout = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.656349] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.rbd_pool = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.656519] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.rbd_user = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.656682] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.region_name = None {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.656843] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.retriable_status_codes = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.657018] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.service_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.657201] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.service_type = image {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.657374] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.split_loggers = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.657534] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.status_code_retries = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.657702] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.status_code_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.657866] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.658064] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.658239] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.verify_glance_signatures = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.658405] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] glance.version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.658583] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] guestfs.debug = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.658752] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] mks.enabled = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.659135] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.659354] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] image_cache.manager_interval = 2400 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.659573] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] image_cache.precache_concurrency = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.659759] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] image_cache.remove_unused_base_images = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.659942] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.660124] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.660310] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] image_cache.subdirectory_name = _base {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.660492] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.api_max_retries = 60 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.660667] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.api_retry_interval = 2 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.660829] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.auth_section = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.660994] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.auth_type = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.661173] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.661335] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.certfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.661532] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.collect_timing = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.661712] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.conductor_group = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.661880] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.connect_retries = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.662057] env[62507]: DEBUG 
oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.connect_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.662230] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.endpoint_override = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.662402] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.insecure = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.662561] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.keyfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.662726] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.max_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.662888] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.min_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.663071] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.peer_list = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.663239] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.region_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.663412] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.retriable_status_codes = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.663578] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.serial_console_state_timeout = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.663747] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.service_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.663916] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.service_type = baremetal {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.664091] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.shard = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.664264] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.split_loggers = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.664427] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.status_code_retries = None {{(pid=62507) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.664593] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.status_code_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.664756] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.664939] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.665118] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ironic.version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.665307] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.665486] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] key_manager.fixed_key = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.665672] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.665842] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.barbican_api_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.666010] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.barbican_endpoint = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.666193] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.barbican_endpoint_type = public {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.666371] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.barbican_region_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.666523] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.666684] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.certfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.666855] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.collect_timing = False {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.667031] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.insecure = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.667203] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.keyfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.667370] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.number_of_retries = 60 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.667537] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.retry_delay = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.667705] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.send_service_user_token = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.667872] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.split_loggers = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.668044] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.668216] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.verify_ssl = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.668380] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican.verify_ssl_path = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.668549] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican_service_user.auth_section = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.668717] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican_service_user.auth_type = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.668885] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican_service_user.cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.669054] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican_service_user.certfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.669226] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican_service_user.collect_timing = False {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.669418] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican_service_user.insecure = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.669589] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican_service_user.keyfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.669761] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican_service_user.split_loggers = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.669924] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] barbican_service_user.timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.670109] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.approle_role_id = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.670278] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.approle_secret_id = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.670443] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.670610] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.certfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.670777] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.collect_timing = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.670941] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.insecure = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.671113] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.keyfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.671337] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.kv_mountpoint = secret {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.671561] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.kv_path = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.671779] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.kv_version = 2 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.671979] env[62507]: DEBUG 
oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.namespace = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.672181] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.root_token_id = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.672357] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.split_loggers = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.672546] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.ssl_ca_crt_file = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.672749] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.672949] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.use_ssl = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.673183] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.673392] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.auth_section = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.673592] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.auth_type = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.673831] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.674053] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.certfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.674245] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.collect_timing = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.674416] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.connect_retries = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.674585] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.connect_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.674752] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.endpoint_override = None {{(pid=62507) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.674920] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.insecure = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.675098] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.keyfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.675266] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.max_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.675427] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.min_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.675588] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.region_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.675748] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.retriable_status_codes = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.675910] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.service_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.676098] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.service_type = identity {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.676269] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.split_loggers = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.676431] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.status_code_retries = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.676596] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.status_code_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.676758] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.676988] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.677202] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] keystone.version = None {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.677423] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.connection_uri = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.677621] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.cpu_mode = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.677823] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.678017] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.cpu_models = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.678207] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.cpu_power_governor_high = performance {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.678385] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.678555] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.cpu_power_management = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.678734] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.678904] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.device_detach_attempts = 8 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.679080] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.device_detach_timeout = 20 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.679256] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.disk_cachemodes = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.679424] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.disk_prefix = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.679595] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.enabled_perf_events = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.679760] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.file_backed_memory = 0 {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.679930] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.gid_maps = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.680107] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.hw_disk_discard = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.680275] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.hw_machine_type = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.680457] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.images_rbd_ceph_conf = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.680634] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.680801] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.680975] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.images_rbd_glance_store_name = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.681169] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.images_rbd_pool = rbd {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.681346] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.images_type = default {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.681536] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.images_volume_group = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.681721] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.inject_key = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.681891] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.inject_partition = -2 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.682072] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.inject_password = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.682251] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.iscsi_iface = None {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.682441] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.iser_use_multipath = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.682652] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.682831] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.683020] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.live_migration_downtime = 500 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.683193] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.683364] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.683533] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.live_migration_inbound_addr = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.683704] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.683873] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.684051] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.live_migration_scheme = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.684240] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.live_migration_timeout_action = abort {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.684415] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.live_migration_tunnelled = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.684586] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.live_migration_uri = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.684756] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.684923] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.max_queues = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.685106] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.685359] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.685532] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.nfs_mount_options = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.685841] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.686033] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.686214] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.686386] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.686558] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.686731] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.num_pcie_ports = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.686907] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.687092] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.pmem_namespaces = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.687269] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.quobyte_client_cfg = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.687559] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.687740] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.687911] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.688093] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.688268] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.rbd_secret_uuid = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.688434] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.rbd_user = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.688606] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.688783] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.688952] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.rescue_image_id = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.689130] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.rescue_kernel_id = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.689298] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.rescue_ramdisk_id = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.689471] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.689636] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.rx_queue_size = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.689809] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.smbfs_mount_options = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.690100] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.690279] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.snapshot_compression = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.690448] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.snapshot_image_format = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.690678] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.690846] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.sparse_logical_volumes = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.691025] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.swtpm_enabled = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.691203] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.swtpm_group = tss {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.691378] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.swtpm_user = tss {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.691590] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.sysinfo_serial = unique {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.691766] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.tb_cache_size = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.691932] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.tx_queue_size = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.692118] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.uid_maps = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.692291] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.use_virtio_for_bridges = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.692500] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.virt_type = kvm {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.692697] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.volume_clear = zero 
{{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.692869] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.volume_clear_size = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.693055] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.volume_use_multipath = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.693227] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.vzstorage_cache_path = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.693404] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.693582] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.693759] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.693931] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.694227] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.694411] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.vzstorage_mount_user = stack {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.694612] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.694804] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.auth_section = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.694983] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.auth_type = password {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.695166] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.695332] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.certfile = None 
{{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.695547] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.collect_timing = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.695666] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.connect_retries = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.695834] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.connect_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.696024] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.default_floating_pool = public {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.696189] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.endpoint_override = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.696360] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.extension_sync_interval = 600 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.696529] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.http_retries = 3 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.696695] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.insecure = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.696856] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.keyfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.697027] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.max_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.697205] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.697370] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.min_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.697539] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.ovs_bridge = br-int {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.697710] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.physnets = [] {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.697880] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.region_name = RegionOne {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.698052] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.retriable_status_codes = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.698229] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.service_metadata_proxy = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.698397] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.service_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.698572] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.service_type = network {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.698742] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.split_loggers = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.698905] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.status_code_retries = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.699079] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.status_code_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.699246] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.699431] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.699599] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] neutron.version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.699773] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] notifications.bdms_in_notifications = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.699954] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] notifications.default_level = INFO {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.700150] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] notifications.notification_format = unversioned {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.700322] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] notifications.notify_on_state_change = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.700503] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.700685] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] pci.alias = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.700862] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] pci.device_spec = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.701040] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] pci.report_in_placement = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.701224] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.auth_section = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.701406] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.auth_type = password {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.702025] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.702025] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.702025] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.certfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.702139] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.collect_timing = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.702249] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.connect_retries = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704487] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.connect_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704487] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.default_domain_id = None {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704487] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.default_domain_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704487] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.domain_id = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704487] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.domain_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704487] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.endpoint_override = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704698] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.insecure = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704698] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.keyfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704698] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.max_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704698] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.min_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704698] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.password = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704698] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.project_domain_id = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704845] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.project_domain_name = Default {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704845] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.project_id = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704845] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.project_name = service {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.704921] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.region_name = RegionOne {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.705217] 
env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.retriable_status_codes = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.705217] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.service_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.705396] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.service_type = placement {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.705566] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.split_loggers = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.705728] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.status_code_retries = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.705895] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.status_code_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.706070] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.system_scope = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.706236] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.706400] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.trust_id = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.706589] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.user_domain_id = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.706788] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.user_domain_name = Default {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.706956] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.user_id = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.707149] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.username = placement {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.707338] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.707506] env[62507]: DEBUG oslo_service.service [None 
req-85d40937-7886-4948-8223-bef26dca53c4 None None] placement.version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.707691] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] quota.cores = 20 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.707862] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] quota.count_usage_from_placement = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.708049] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.708234] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] quota.injected_file_content_bytes = 10240 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.708488] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] quota.injected_file_path_length = 255 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.708696] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] quota.injected_files = 5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.708874] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] quota.instances = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.709061] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] quota.key_pairs = 100 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.709240] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] quota.metadata_items = 128 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.709413] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] quota.ram = 51200 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.709588] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] quota.recheck_quota = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.709762] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] quota.server_group_members = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.709934] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] quota.server_groups = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.710155] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.710384] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.710568] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] scheduler.image_metadata_prefilter = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.710744] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.710918] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] scheduler.max_attempts = 3 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.711103] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] scheduler.max_placement_results = 1000 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.711278] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.711469] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.711653] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.711833] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] scheduler.workers = 2 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.712025] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.712209] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.712400] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.712579] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.712756] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.712926] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.713110] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.713307] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.713480] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.host_subset_size = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.713657] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.713846] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.714035] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.714218] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.isolated_hosts = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.714390] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.isolated_images = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.714565] env[62507]: DEBUG oslo_service.service [None 
req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.714732] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.714911] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.715098] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.pci_in_placement = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.715271] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.715441] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.715609] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.715773] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.715940] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.716121] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.716290] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.track_instance_changes = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.716471] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.716650] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] metrics.required = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.716818] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] metrics.weight_multiplier = 1.0 
{{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.716986] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.717171] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] metrics.weight_setting = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.717491] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.717674] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] serial_console.enabled = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.717855] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] serial_console.port_range = 10000:20000 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.718043] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.718222] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.718394] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] serial_console.serialproxy_port = 6083 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.718569] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] service_user.auth_section = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.718747] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] service_user.auth_type = password {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.718914] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] service_user.cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.719090] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] service_user.certfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.719262] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] service_user.collect_timing = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.719428] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] service_user.insecure = False {{(pid=62507) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.719595] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] service_user.keyfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.719771] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] service_user.send_service_user_token = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.719939] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] service_user.split_loggers = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.720115] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] service_user.timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.720292] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] spice.agent_enabled = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.720480] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] spice.enabled = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.720851] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.721065] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.721249] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] spice.html5proxy_port = 6082 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.721450] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] spice.image_compression = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.721643] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] spice.jpeg_compression = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.721817] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] spice.playback_compression = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.721996] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] spice.server_listen = 127.0.0.1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.722213] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.722355] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] spice.streaming_mode = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.722546] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] spice.zlib_compression = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.722727] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] upgrade_levels.baseapi = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.722904] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] upgrade_levels.compute = auto {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.723083] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] upgrade_levels.conductor = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.723253] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] upgrade_levels.scheduler = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.723424] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.723636] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.723840] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vendordata_dynamic_auth.cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.724024] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vendordata_dynamic_auth.certfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.724198] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.724367] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vendordata_dynamic_auth.insecure = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.724532] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.724702] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.724866] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vendordata_dynamic_auth.timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.725055] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.api_retry_count = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.725228] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.ca_file = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.725406] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.725580] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.cluster_name = testcl1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.725750] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.connection_pool_size = 10 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.725913] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.console_delay_seconds = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.726099] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.datastore_regex = ^datastore.* {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.726316] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.726495] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.host_password = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.726668] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.host_port = 443 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.726843] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.host_username = administrator@vsphere.local {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.727028] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.insecure = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.727203] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.integration_bridge = None {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.727374] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.maximum_objects = 100 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.727540] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.pbm_default_policy = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.727709] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.pbm_enabled = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.727874] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.pbm_wsdl_location = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.728057] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.728223] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.serial_port_proxy_uri = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.728385] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.serial_port_service_uri = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.728554] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.task_poll_interval = 0.5 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.728731] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.use_linked_clone = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.728903] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.vnc_keymap = en-us {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.729085] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.vnc_port = 5900 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.729257] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vmware.vnc_port_total = 10000 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.729449] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vnc.auth_schemes = ['none'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.729631] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vnc.enabled = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.729931] env[62507]: 
DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.730138] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.730320] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vnc.novncproxy_port = 6080 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.730505] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vnc.server_listen = 127.0.0.1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.730687] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.730854] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vnc.vencrypt_ca_certs = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.731026] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vnc.vencrypt_client_cert = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.731195] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vnc.vencrypt_client_key = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.731381] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.731575] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.disable_deep_image_inspection = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.731754] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.731921] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.732099] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.732271] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.disable_rootwrap = False {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.732440] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.enable_numa_live_migration = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.732639] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.732826] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.732994] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.733180] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.libvirt_disable_apic = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.733348] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.733513] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.733683] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.733848] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.734021] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.734194] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.734360] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.734527] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.734692] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.734862] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.735061] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.735241] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] wsgi.client_socket_timeout = 900 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.735415] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] wsgi.default_pool_size = 1000 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.735589] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] wsgi.keep_alive = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.735759] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] wsgi.max_header_line = 16384 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.735927] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.736106] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] wsgi.ssl_ca_file = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.736277] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] wsgi.ssl_cert_file = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.736442] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] wsgi.ssl_key_file = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.736615] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] wsgi.tcp_keepidle = 600 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.736794] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.736964] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] zvm.ca_file = None {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.737141] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] zvm.cloud_connector_url = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.737432] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.737612] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] zvm.reachable_timeout = 300 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.737799] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_policy.enforce_new_defaults = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.737977] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_policy.enforce_scope = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.738176] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_policy.policy_default_rule = default {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.738364] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.738544] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_policy.policy_file = policy.yaml {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.738720] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.738886] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.739068] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.739236] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.739404] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.739581] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.739760] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.739939] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] profiler.connection_string = messaging:// {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.740124] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] profiler.enabled = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.740302] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] profiler.es_doc_type = notification {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.740472] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] profiler.es_scroll_size = 10000 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.740649] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] profiler.es_scroll_time = 2m {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.740818] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] profiler.filter_error_trace = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.740991] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] profiler.hmac_keys = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.741183] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] profiler.sentinel_service_name = mymaster {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.741356] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] profiler.socket_timeout = 0.1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.741553] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] profiler.trace_requests = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.741732] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] profiler.trace_sqlalchemy = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.741924] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] profiler_jaeger.process_tags = {} {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.742105] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.742278] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] profiler_otlp.service_name_prefix = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.742470] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] remote_debug.host = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.742647] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] remote_debug.port = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.742834] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.743011] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.743187] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.743356] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.743525] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.743691] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.743900] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.744100] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.744273] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.744450] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.744620] env[62507]: 
DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.744795] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.744968] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.745161] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.745339] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.745513] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.745683] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.745865] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.746044] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.746218] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.746389] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.746559] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.746727] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.746899] env[62507]: DEBUG oslo_service.service [None 
req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.747084] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.747254] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.747423] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.747591] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.747761] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.747929] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.ssl = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.748120] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.748310] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.748481] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.748658] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.748834] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.749016] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.749211] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.749383] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_notifications.retry = -1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.749573] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.749761] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.749939] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.auth_section = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.750123] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.auth_type = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.750291] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.cafile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.750457] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.certfile = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.750629] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.collect_timing = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.750792] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.connect_retries = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.750956] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.connect_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.751132] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.endpoint_id = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.751300] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.endpoint_override = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.751510] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.insecure = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.751664] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.keyfile = None {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.751827] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.max_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.751988] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.min_version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.752165] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.region_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.752329] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.retriable_status_codes = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.752495] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.service_name = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.752655] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.service_type = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.752821] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.split_loggers = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.752984] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.status_code_retries = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.753160] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.status_code_retry_delay = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.753322] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.timeout = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.753486] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.valid_interfaces = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.753650] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_limit.version = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.753841] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_reports.file_event_handler = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.754030] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62507) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.754200] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] oslo_reports.log_dir = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.754377] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.754544] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.754708] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.754878] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.755056] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.755226] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.755402] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.755568] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plug_ovs_privileged.group = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.755730] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.755931] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.756123] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.756294] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] vif_plug_ovs_privileged.user = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.756471] env[62507]: DEBUG oslo_service.service 
[None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.756657] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.756838] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.757019] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.757202] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.757373] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.757544] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.757715] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.757898] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.758088] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_ovs.isolate_vif = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.758273] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.758449] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.758627] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.758800] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
[ 498.758974] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_vif_ovs.per_port_bridge = False {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.759157] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_brick.lock_path = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.759327] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.759493] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.759669] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] privsep_osbrick.capabilities = [21] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.759836] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] privsep_osbrick.group = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.760007] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] privsep_osbrick.helper_command = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.760187] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.760357] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.760522] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] privsep_osbrick.user = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.760698] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.760862] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] nova_sys_admin.group = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.761033] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] nova_sys_admin.helper_command = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.761208] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.761375] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.761563] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] nova_sys_admin.user = None {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.761703] env[62507]: DEBUG oslo_service.service [None req-85d40937-7886-4948-8223-bef26dca53c4 None None] ******************************************************************************** {{(pid=62507) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 498.762171] env[62507]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 498.771705] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Getting list of instances from cluster (obj){ [ 498.771705] env[62507]: value = "domain-c8" [ 498.771705] env[62507]: _type = "ClusterComputeResource" [ 498.771705] env[62507]: } {{(pid=62507) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 498.772949] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18021af8-465a-40be-b97f-9513e9d6b431 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.782127] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Got total of 0 instances {{(pid=62507) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 498.782646] env[62507]: WARNING nova.virt.vmwareapi.driver [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 498.783104] env[62507]: INFO nova.virt.node [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Generated node identity 40e67440-0925-46e5-9b58-6e63187cdfab [ 498.783333] env[62507]: INFO nova.virt.node [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Wrote node identity 40e67440-0925-46e5-9b58-6e63187cdfab to /opt/stack/data/n-cpu-1/compute_id [ 498.794989] env[62507]: WARNING nova.compute.manager [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Compute nodes ['40e67440-0925-46e5-9b58-6e63187cdfab'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 498.833987] env[62507]: INFO nova.compute.manager [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 498.858190] env[62507]: WARNING nova.compute.manager [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 498.858674] env[62507]: DEBUG oslo_concurrency.lockutils [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 498.858929] env[62507]: DEBUG oslo_concurrency.lockutils [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 498.859116] env[62507]: DEBUG oslo_concurrency.lockutils [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 498.859277] env[62507]: DEBUG nova.compute.resource_tracker [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 498.860370] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53cd271-4e1e-427a-a0ed-8c1898341449 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.868715] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e50124-8134-430b-8d2c-caf81b2f55e1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.883097] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bc20c5-a7a6-4400-a3ab-d5131472e02b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.889182] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd58587-8d5d-4243-a205-ca82e5053cae {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.919360] env[62507]: DEBUG nova.compute.resource_tracker [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181162MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 498.919527] env[62507]: DEBUG oslo_concurrency.lockutils [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 498.919697] env[62507]: DEBUG oslo_concurrency.lockutils [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 498.931066] env[62507]: WARNING 
nova.compute.resource_tracker [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] No compute node record for cpu-1:40e67440-0925-46e5-9b58-6e63187cdfab: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 40e67440-0925-46e5-9b58-6e63187cdfab could not be found. [ 498.947558] env[62507]: INFO nova.compute.resource_tracker [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 40e67440-0925-46e5-9b58-6e63187cdfab [ 499.000646] env[62507]: DEBUG nova.compute.resource_tracker [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 499.000878] env[62507]: DEBUG nova.compute.resource_tracker [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 499.104851] env[62507]: INFO nova.scheduler.client.report [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] [req-5cd0d69b-90f1-49c8-9c0d-d76c1548d02f] Created resource provider record via placement API for resource provider with UUID 40e67440-0925-46e5-9b58-6e63187cdfab and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 499.122849] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cfbe99d-cf61-4ac8-adea-fb3382753c36 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.130467] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5decb33-1a6b-4de8-ba68-6147dac98baa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.161472] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b769d4b-f901-4a0d-b92a-ba1439303c70 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.168961] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db88d71-84e5-40d0-9fe3-eb7463f54d20 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.183233] env[62507]: DEBUG nova.compute.provider_tree [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Updating inventory in ProviderTree for provider 40e67440-0925-46e5-9b58-6e63187cdfab with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 499.222773] env[62507]: DEBUG nova.scheduler.client.report [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Updated inventory for provider 40e67440-0925-46e5-9b58-6e63187cdfab with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 499.223016] env[62507]: DEBUG nova.compute.provider_tree [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Updating resource provider 40e67440-0925-46e5-9b58-6e63187cdfab generation from 0 to 1 during operation: update_inventory {{(pid=62507) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 499.223167] env[62507]: DEBUG nova.compute.provider_tree [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Updating inventory in ProviderTree for provider 40e67440-0925-46e5-9b58-6e63187cdfab with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 499.273848] env[62507]: DEBUG nova.compute.provider_tree [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Updating resource provider 40e67440-0925-46e5-9b58-6e63187cdfab generation from 1 to 2 during operation: update_traits {{(pid=62507) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 499.291212] env[62507]: DEBUG nova.compute.resource_tracker [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 499.291451] env[62507]: DEBUG oslo_concurrency.lockutils [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.372s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 499.291607] env[62507]: DEBUG nova.service [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Creating RPC server for service compute {{(pid=62507) start /opt/stack/nova/nova/service.py:182}} [ 499.304942] env[62507]: DEBUG nova.service [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] Join ServiceGroup membership for this service compute {{(pid=62507) start /opt/stack/nova/nova/service.py:199}} [ 499.305144] env[62507]: DEBUG nova.servicegroup.drivers.db [None req-73a970de-5676-4f67-87ed-13d14a64af2a None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62507) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 508.594422] env[62507]: DEBUG dbcounter [-] [62507] Writing DB stats nova_cell1:SELECT=1 {{(pid=62507) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 508.595245] env[62507]: DEBUG dbcounter [-] [62507] Writing DB stats nova_cell0:SELECT=1 {{(pid=62507) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 511.309361] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] 
Running periodic task ComputeManager._sync_power_states {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 511.319502] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Getting list of instances from cluster (obj){ [ 511.319502] env[62507]: value = "domain-c8" [ 511.319502] env[62507]: _type = "ClusterComputeResource" [ 511.319502] env[62507]: } {{(pid=62507) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 511.320561] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4640b76a-ff1e-40ca-9fb7-3cbb16ad9fe8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.329102] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Got total of 0 instances {{(pid=62507) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 511.329317] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 511.329609] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Getting list of instances from cluster (obj){ [ 511.329609] env[62507]: value = "domain-c8" [ 511.329609] env[62507]: _type = "ClusterComputeResource" [ 511.329609] env[62507]: } {{(pid=62507) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 511.330426] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d295c500-5985-4761-b04f-af1d990e712c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.337574] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Got total of 0 instances {{(pid=62507) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 535.041100] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Acquiring lock "55592fa8-93f7-49a2-8022-f4d0825c705b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.041423] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Lock "55592fa8-93f7-49a2-8022-f4d0825c705b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.056788] env[62507]: DEBUG nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 535.163381] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.163658] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.165248] env[62507]: INFO nova.compute.claims [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 535.285953] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04d0329-c89a-43ed-a81c-d02e4269c282 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.293865] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcc58a5-8715-4b53-b365-4c8d41062a57 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.323987] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7c0bbe-d428-4226-a088-7b9fd3d69e6f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.330990] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d3a80c-fccc-4fc6-a832-047e12c46e5e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.344662] env[62507]: DEBUG nova.compute.provider_tree [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 535.357664] env[62507]: DEBUG nova.scheduler.client.report [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 535.377410] env[62507]: DEBUG oslo_concurrency.lockutils 
[None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.214s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.378156] env[62507]: DEBUG nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 535.420188] env[62507]: DEBUG nova.compute.utils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 535.421893] env[62507]: DEBUG nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Not allocating networking since 'none' was specified. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 535.440696] env[62507]: DEBUG nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 535.521459] env[62507]: DEBUG nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 537.048020] env[62507]: DEBUG nova.virt.hardware [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 537.048328] env[62507]: DEBUG nova.virt.hardware [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 537.048421] env[62507]: DEBUG nova.virt.hardware [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 537.048571] env[62507]: DEBUG nova.virt.hardware [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 537.048691] env[62507]: DEBUG nova.virt.hardware [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 537.049021] env[62507]: DEBUG nova.virt.hardware [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 537.049443] env[62507]: DEBUG nova.virt.hardware [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 537.049443] env[62507]: DEBUG nova.virt.hardware [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 537.049967] 
env[62507]: DEBUG nova.virt.hardware [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 537.049967] env[62507]: DEBUG nova.virt.hardware [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 537.049967] env[62507]: DEBUG nova.virt.hardware [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 537.051160] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833778df-9322-4045-9e32-41b05909fd6c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.059885] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0a6e40-e097-4d5c-80c4-8feb6464319c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.075784] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c768be-bc6f-43bc-bd7c-eb2f49ec995f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.093876] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Instance VIF info [] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 537.103203] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 537.103492] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65e2f37d-9f28-48a6-83b7-e18ed3f56e6b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.113836] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Created folder: OpenStack in parent group-v4. [ 537.114038] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Creating folder: Project (773c718df5d2423bb5c2665e4b642172). Parent ref: group-v497991. 
{{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 537.114276] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd96fa64-e954-4a53-b870-3e6b8dab5d00 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.123976] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Created folder: Project (773c718df5d2423bb5c2665e4b642172) in parent group-v497991. [ 537.123976] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Creating folder: Instances. Parent ref: group-v497992. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 537.124103] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-517fc94b-d82a-4c37-92ee-b8c97a38b1d7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.133099] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Created folder: Instances in parent group-v497992. [ 537.133373] env[62507]: DEBUG oslo.service.loopingcall [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 537.133553] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 537.133743] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea7bf628-c822-43d1-9650-2c9e03cd7066 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.151299] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 537.151299] env[62507]: value = "task-2459893" [ 537.151299] env[62507]: _type = "Task" [ 537.151299] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.159939] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459893, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 537.667018] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459893, 'name': CreateVM_Task, 'duration_secs': 0.425404} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 537.667018] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 537.667018] env[62507]: DEBUG oslo_vmware.service [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c3916f-7772-4643-8229-187d23d6db22 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.674264] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.674639] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.675402] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 537.675749] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb1b0df3-cce6-40b5-ad8d-9e85d95fcb8d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.681039] env[62507]: DEBUG oslo_vmware.api [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Waiting for the task: (returnval){ [ 537.681039] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5214e046-bf40-4782-3b16-310515661135" [ 537.681039] env[62507]: _type = "Task" [ 537.681039] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.689465] env[62507]: DEBUG oslo_vmware.api [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5214e046-bf40-4782-3b16-310515661135, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 537.910609] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Acquiring lock "fa608f4a-47e6-4904-af65-a82c107af979" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.910845] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Lock "fa608f4a-47e6-4904-af65-a82c107af979" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.937385] env[62507]: DEBUG nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 538.014423] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.015739] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.017947] env[62507]: INFO nova.compute.claims [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 538.143586] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43061d6-0c89-41da-a7d1-d8dec954deee {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.151857] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d385ff2c-a532-4c04-99a6-b86bfb28d37a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.194836] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b836e197-de80-4df2-a50e-5242d88a0f78 {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.206191] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 538.209529] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 538.209529] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 538.209529] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 538.209529] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 538.209684] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b278f994-199e-4168-8e36-00e38a97956f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.218020] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2004e0c-9826-4a2a-ad0d-be5c64f7e4d9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.229097] env[62507]: DEBUG nova.compute.provider_tree [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 538.231924] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 538.232150] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 
tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 538.232909] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05084bab-f7a7-4caf-8537-0bdc0c81f994 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.239279] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acbb170a-9a80-40d3-bad0-71c62cea94e7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.244031] env[62507]: DEBUG nova.scheduler.client.report [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 538.253240] env[62507]: DEBUG oslo_vmware.api [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Waiting for the task: (returnval){ [ 538.253240] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52411496-7a58-7c8f-ea71-16fbacc20a7d" [ 538.253240] env[62507]: _type = "Task" [ 538.253240] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 538.261063] env[62507]: DEBUG oslo_vmware.api [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52411496-7a58-7c8f-ea71-16fbacc20a7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 538.264972] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.249s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.264972] env[62507]: DEBUG nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Start building networks asynchronously for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 538.311558] env[62507]: DEBUG nova.compute.utils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 538.312863] env[62507]: DEBUG nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 538.313832] env[62507]: DEBUG nova.network.neutron [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 538.327063] env[62507]: DEBUG nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 538.415759] env[62507]: DEBUG nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 538.448361] env[62507]: DEBUG nova.virt.hardware [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 538.448588] env[62507]: DEBUG nova.virt.hardware [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 538.448740] env[62507]: DEBUG nova.virt.hardware [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 538.448917] env[62507]: DEBUG nova.virt.hardware [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 538.449080] env[62507]: DEBUG nova.virt.hardware [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 538.449234] env[62507]: DEBUG nova.virt.hardware [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 538.449443] env[62507]: DEBUG nova.virt.hardware [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 538.449601] env[62507]: DEBUG nova.virt.hardware [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 
tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 538.449768] env[62507]: DEBUG nova.virt.hardware [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 538.449957] env[62507]: DEBUG nova.virt.hardware [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 538.450149] env[62507]: DEBUG nova.virt.hardware [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 538.452185] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d7b66f-12e8-438e-bb02-2a8fc270f236 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.460360] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4e5bd6-aa8b-4b03-ad2f-8a613651636a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.706836] env[62507]: DEBUG nova.policy [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '75f71b5470f74c43ba8f18c1045e4b8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a622b02b7b345b193448f79ac976b06', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 538.766106] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 538.766917] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Creating directory with path [datastore2] vmware_temp/365d0e8e-d5b6-4efd-9053-1202c86978ed/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 538.770953] env[62507]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-cd364f16-29e3-44a2-ac4d-eca8e7e97418 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.835507] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Created directory with path [datastore2] vmware_temp/365d0e8e-d5b6-4efd-9053-1202c86978ed/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 538.835507] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Fetch image to [datastore2] vmware_temp/365d0e8e-d5b6-4efd-9053-1202c86978ed/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 538.835507] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/365d0e8e-d5b6-4efd-9053-1202c86978ed/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 538.835507] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf6e0f5-af5f-4bfe-9522-319ab68753f8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.847258] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4dbbfa5-2ad2-4359-8619-ddd76af80b24 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.861295] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a11470-4486-4a28-be47-4eaa6976ef44 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.897767] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78eb0134-b854-41d6-8936-c8a3b9e22f0c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.905123] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9e41bee7-3b8b-49b3-b9ef-bbe522657d57 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.992587] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 539.086422] env[62507]: DEBUG oslo_vmware.rw_handles [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 
tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/365d0e8e-d5b6-4efd-9053-1202c86978ed/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 539.177333] env[62507]: DEBUG oslo_vmware.rw_handles [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 539.177566] env[62507]: DEBUG oslo_vmware.rw_handles [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/365d0e8e-d5b6-4efd-9053-1202c86978ed/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 540.249209] env[62507]: DEBUG nova.network.neutron [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Successfully created port: 69f0c988-a2d2-4296-92f3-d98e73fec759 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 542.312433] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Acquiring lock "598b1fd3-d762-4625-9660-ccf76af2394c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.313385] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Lock "598b1fd3-d762-4625-9660-ccf76af2394c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.327863] env[62507]: DEBUG nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 542.412280] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.412667] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.416378] env[62507]: INFO nova.compute.claims [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 542.597581] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f238e0-1f02-4d79-9a3f-d38009a4c4fe {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.609870] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb267aea-1ac5-468d-8dc3-406b0617b80d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.653622] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22da3e6-642e-4edd-9357-d3035d1b8990 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.665158] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12e0460-3e87-4a4d-bbe3-6f48aa035b35 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.693970] env[62507]: DEBUG nova.compute.provider_tree [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 542.710503] env[62507]: DEBUG nova.scheduler.client.report [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 542.767699] env[62507]: DEBUG 
oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.355s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.768148] env[62507]: DEBUG nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 542.848631] env[62507]: DEBUG nova.compute.utils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 542.850431] env[62507]: DEBUG nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 542.852979] env[62507]: DEBUG nova.network.neutron [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 542.876455] env[62507]: DEBUG nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 543.019700] env[62507]: DEBUG nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 543.072875] env[62507]: DEBUG nova.virt.hardware [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 543.073110] env[62507]: DEBUG nova.virt.hardware [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 543.073411] env[62507]: DEBUG nova.virt.hardware [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 543.073822] env[62507]: DEBUG nova.virt.hardware [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 543.073822] env[62507]: DEBUG nova.virt.hardware [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 543.073822] env[62507]: DEBUG nova.virt.hardware [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 543.073998] env[62507]: DEBUG nova.virt.hardware [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 543.074836] env[62507]: DEBUG nova.virt.hardware [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
543.075168] env[62507]: DEBUG nova.virt.hardware [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 543.075265] env[62507]: DEBUG nova.virt.hardware [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 543.075424] env[62507]: DEBUG nova.virt.hardware [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 543.076343] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51298933-d630-450a-85f7-14bc61d15eaf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.089527] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7688f7e2-9eaf-4fcb-8803-0104f839425e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.102542] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Acquiring lock "4144689d-05a1-4e7f-b159-75cbaef82333" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.102767] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Lock "4144689d-05a1-4e7f-b159-75cbaef82333" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.127243] env[62507]: DEBUG nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 543.147248] env[62507]: DEBUG nova.policy [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd63a75979066476d9ef8b1e94a6f52b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b897f2e1e1a48a1995901829a999601', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 543.229694] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.229946] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.231545] env[62507]: INFO nova.compute.claims [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 543.372265] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251253bb-4a41-4d8f-b6b4-179c34c0d854 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.382089] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80216f1-e41d-4d57-b3b0-682ba072f97f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.417562] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640487a9-7b5b-4904-9c2c-907594fe2737 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.426106] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3888c357-6a74-4fe3-b490-76e2b5e86186 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.439264] env[62507]: DEBUG nova.compute.provider_tree [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.453741] env[62507]: DEBUG 
nova.scheduler.client.report [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 543.479069] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.249s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.479607] env[62507]: DEBUG nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 543.535621] env[62507]: DEBUG nova.compute.utils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 543.537020] env[62507]: DEBUG nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 543.537303] env[62507]: DEBUG nova.network.neutron [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 543.556452] env[62507]: DEBUG nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 543.661864] env[62507]: DEBUG nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 543.698727] env[62507]: DEBUG nova.virt.hardware [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 543.698993] env[62507]: DEBUG nova.virt.hardware [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 543.699199] env[62507]: DEBUG nova.virt.hardware [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 543.699406] env[62507]: DEBUG nova.virt.hardware [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 543.699584] env[62507]: DEBUG nova.virt.hardware [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 543.700679] env[62507]: DEBUG nova.virt.hardware [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 543.700679] env[62507]: DEBUG nova.virt.hardware [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 543.703304] env[62507]: DEBUG nova.virt.hardware [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 543.703545] env[62507]: DEBUG nova.virt.hardware [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 543.703725] env[62507]: DEBUG nova.virt.hardware [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 543.703910] env[62507]: DEBUG nova.virt.hardware [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 543.705312] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcc3fd1-9117-4c01-be99-88af9120195e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.714430] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a7e4b6b-71ea-40a4-b2e6-286a50559ec4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.935896] env[62507]: DEBUG nova.policy [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aecbc5d3d85c48b4afa4564dedec7504', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '934970a68f7547b7bad6aa33c0ea2fee', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 545.283538] env[62507]: DEBUG nova.network.neutron [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Successfully updated port: 69f0c988-a2d2-4296-92f3-d98e73fec759 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 545.303770] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Acquiring lock "refresh_cache-fa608f4a-47e6-4904-af65-a82c107af979" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.303770] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Acquired lock 
"refresh_cache-fa608f4a-47e6-4904-af65-a82c107af979" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.303770] env[62507]: DEBUG nova.network.neutron [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 545.502386] env[62507]: DEBUG nova.network.neutron [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 545.687452] env[62507]: DEBUG nova.network.neutron [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Successfully created port: 54342925-eaad-4a61-b9ce-ec421de6973e {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 545.910446] env[62507]: DEBUG nova.network.neutron [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Successfully created port: 8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 546.927804] env[62507]: DEBUG nova.network.neutron [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Updating instance_info_cache with network_info: [{"id": "69f0c988-a2d2-4296-92f3-d98e73fec759", "address": "fa:16:3e:eb:ba:ec", "network": {"id": "2968634f-4390-45d9-b1df-0644cd1244b0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-200178405-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a622b02b7b345b193448f79ac976b06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69f0c988-a2", "ovs_interfaceid": "69f0c988-a2d2-4296-92f3-d98e73fec759", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.945512] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 
tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Releasing lock "refresh_cache-fa608f4a-47e6-4904-af65-a82c107af979" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.945795] env[62507]: DEBUG nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Instance network_info: |[{"id": "69f0c988-a2d2-4296-92f3-d98e73fec759", "address": "fa:16:3e:eb:ba:ec", "network": {"id": "2968634f-4390-45d9-b1df-0644cd1244b0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-200178405-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a622b02b7b345b193448f79ac976b06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69f0c988-a2", "ovs_interfaceid": "69f0c988-a2d2-4296-92f3-d98e73fec759", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 546.946342] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:ba:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e55c248-c504-4c7a-bbe9-f42cf417aee7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '69f0c988-a2d2-4296-92f3-d98e73fec759', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 546.960582] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Creating folder: Project (0a622b02b7b345b193448f79ac976b06). Parent ref: group-v497991. 
{{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 546.961266] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c204ee20-ce95-4a0e-b76e-2e29f4d5c89f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.977069] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Created folder: Project (0a622b02b7b345b193448f79ac976b06) in parent group-v497991. [ 546.977069] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Creating folder: Instances. Parent ref: group-v497995. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 546.977889] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a24a197-6aa6-4798-a85e-5a925f7bb210 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.989960] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Created folder: Instances in parent group-v497995. [ 546.990417] env[62507]: DEBUG oslo.service.loopingcall [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 546.990417] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 546.990921] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74838e5d-c1be-4ae9-8f30-6d7aec2c3f16 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.015458] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 547.015458] env[62507]: value = "task-2459896" [ 547.015458] env[62507]: _type = "Task" [ 547.015458] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.023548] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459896, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.530835] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459896, 'name': CreateVM_Task, 'duration_secs': 0.338246} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.530835] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 547.586628] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.586800] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.587466] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 547.587737] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b840303-835e-4b46-8a77-9a88a4160dc9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.600221] env[62507]: DEBUG oslo_vmware.api [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Waiting for the task: (returnval){ [ 547.600221] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52668218-5e45-1c33-4365-07e2382314ca" [ 547.600221] env[62507]: _type = "Task" [ 547.600221] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.618181] env[62507]: DEBUG oslo_vmware.api [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52668218-5e45-1c33-4365-07e2382314ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.891342] env[62507]: DEBUG nova.compute.manager [req-8ecdfeee-fc87-435d-b62f-7c0a2db38658 req-0be45a14-271d-4e37-97f3-e16a12c8ceb8 service nova] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Received event network-vif-plugged-69f0c988-a2d2-4296-92f3-d98e73fec759 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 547.891567] env[62507]: DEBUG oslo_concurrency.lockutils [req-8ecdfeee-fc87-435d-b62f-7c0a2db38658 req-0be45a14-271d-4e37-97f3-e16a12c8ceb8 service nova] Acquiring lock "fa608f4a-47e6-4904-af65-a82c107af979-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.891811] env[62507]: DEBUG oslo_concurrency.lockutils [req-8ecdfeee-fc87-435d-b62f-7c0a2db38658 req-0be45a14-271d-4e37-97f3-e16a12c8ceb8 service nova] Lock "fa608f4a-47e6-4904-af65-a82c107af979-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.891948] env[62507]: DEBUG oslo_concurrency.lockutils [req-8ecdfeee-fc87-435d-b62f-7c0a2db38658 req-0be45a14-271d-4e37-97f3-e16a12c8ceb8 service nova] Lock "fa608f4a-47e6-4904-af65-a82c107af979-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.896478] env[62507]: DEBUG nova.compute.manager [req-8ecdfeee-fc87-435d-b62f-7c0a2db38658 req-0be45a14-271d-4e37-97f3-e16a12c8ceb8 service nova] [instance: fa608f4a-47e6-4904-af65-a82c107af979] No waiting events found dispatching network-vif-plugged-69f0c988-a2d2-4296-92f3-d98e73fec759 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 547.896478] env[62507]: WARNING nova.compute.manager [req-8ecdfeee-fc87-435d-b62f-7c0a2db38658 req-0be45a14-271d-4e37-97f3-e16a12c8ceb8 service nova] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Received unexpected event network-vif-plugged-69f0c988-a2d2-4296-92f3-d98e73fec759 for instance with vm_state building and task_state spawning. 
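The three entries above trace the external-event plumbing: Neutron reports network-vif-plugged-69f0c988-a2d2-4296-92f3-d98e73fec759, the compute manager takes the per-instance "-events" lock to pop a matching waiter, and, because the spawn path has not yet registered interest in that event, logs it as unexpected (harmless here, since the instance is still building). As a minimal illustrative sketch of that waiter-registry pattern — assumptions only, the class and names below are hypothetical and are not Nova's internals:

    import threading
    from collections import defaultdict

    class EventWaiter:
        """Toy instance-event registry: a spawner registers interest in an
        (event_name, tag) pair; the external-event handler pops and signals
        the matching waiter, or reports the event as unexpected."""

        def __init__(self):
            # stands in for the per-instance "<uuid>-events" lock seen above
            self._lock = threading.Lock()
            # instance_uuid -> {(event_name, tag): threading.Event}
            self._waiters = defaultdict(dict)

        def prepare(self, instance_uuid, name, tag):
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][(name, tag)] = ev
            return ev  # caller later blocks on ev.wait(timeout=...)

        def dispatch(self, instance_uuid, name, tag):
            with self._lock:
                ev = self._waiters[instance_uuid].pop((name, tag), None)
            if ev is None:
                # analogue of the WARNING entry above
                print(f"Received unexpected event {name}-{tag} "
                      f"for instance {instance_uuid}")
            else:
                ev.set()

    # Usage sketch: the spawn path would call
    #   ev = waiter.prepare(uuid, 'network-vif-plugged', port_id)
    # before plugging the VIF, then wait on ev; the event handler calls
    #   waiter.dispatch(uuid, 'network-vif-plugged', port_id)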
[ 548.111900] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.112590] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 548.112903] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.335932] env[62507]: DEBUG nova.network.neutron [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Successfully updated port: 54342925-eaad-4a61-b9ce-ec421de6973e {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 548.354292] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Acquiring lock "refresh_cache-598b1fd3-d762-4625-9660-ccf76af2394c" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.354557] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Acquired lock "refresh_cache-598b1fd3-d762-4625-9660-ccf76af2394c" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.354695] env[62507]: DEBUG nova.network.neutron [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 548.416122] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Acquiring lock "1b19cecd-2a04-4077-9758-9947a3bcb4c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.416568] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 
tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Lock "1b19cecd-2a04-4077-9758-9947a3bcb4c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.429242] env[62507]: DEBUG nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 548.441551] env[62507]: DEBUG nova.network.neutron [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Successfully updated port: 8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 548.457613] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Acquiring lock "refresh_cache-4144689d-05a1-4e7f-b159-75cbaef82333" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.457715] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Acquired lock "refresh_cache-4144689d-05a1-4e7f-b159-75cbaef82333" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.458233] env[62507]: DEBUG nova.network.neutron [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 548.520383] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.520586] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.523530] env[62507]: INFO nova.compute.claims [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 548.661479] env[62507]: DEBUG 
nova.network.neutron [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.685571] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011692b0-592c-4d1a-ab59-78d0732fdec3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.696717] env[62507]: DEBUG nova.network.neutron [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.700329] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078b0de1-8a07-4af9-bc54-ab6b2a3e274f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.736767] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d238692a-3e72-4ad6-ab5b-1cc7a305339c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.748796] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9efc64-7c90-4701-827b-e9a631073d40 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.765811] env[62507]: DEBUG nova.compute.provider_tree [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 548.779875] env[62507]: DEBUG nova.scheduler.client.report [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 548.802322] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.281s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.802762] env[62507]: DEBUG nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef 
tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 548.848241] env[62507]: DEBUG nova.compute.utils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 548.849615] env[62507]: DEBUG nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 548.850118] env[62507]: DEBUG nova.network.neutron [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 548.862768] env[62507]: DEBUG nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 548.946950] env[62507]: DEBUG nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 548.977655] env[62507]: DEBUG nova.virt.hardware [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 548.977893] env[62507]: DEBUG nova.virt.hardware [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 548.978068] env[62507]: DEBUG nova.virt.hardware [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 548.978257] env[62507]: DEBUG nova.virt.hardware [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 548.978535] env[62507]: DEBUG nova.virt.hardware [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 548.978613] env[62507]: DEBUG nova.virt.hardware [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 548.978890] env[62507]: DEBUG nova.virt.hardware [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 548.978958] env[62507]: DEBUG nova.virt.hardware [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 548.979111] env[62507]: DEBUG nova.virt.hardware [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 548.979283] env[62507]: DEBUG nova.virt.hardware [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 548.979458] env[62507]: DEBUG nova.virt.hardware [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 548.980363] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361fa640-4a15-4fce-bfb9-ab64458d6abb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.991382] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdb69d5-3d1c-4f1b-a26c-813d32df5211 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.197248] env[62507]: DEBUG nova.policy [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0aa1f0d33bab4b6cad80abf66a9f9213', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bc2e7a666c9c4a7f948e5c43d385685e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 549.889030] env[62507]: DEBUG nova.network.neutron [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Updating instance_info_cache with network_info: [{"id": "54342925-eaad-4a61-b9ce-ec421de6973e", "address": "fa:16:3e:22:77:1d", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54342925-ea", "ovs_interfaceid": "54342925-eaad-4a61-b9ce-ec421de6973e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.902455] env[62507]: DEBUG nova.network.neutron [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Updating instance_info_cache with network_info: [{"id": "8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90", "address": "fa:16:3e:ab:6c:78", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff7ea11-9a", "ovs_interfaceid": "8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.914466] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Releasing lock "refresh_cache-598b1fd3-d762-4625-9660-ccf76af2394c" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.914783] env[62507]: DEBUG nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Instance network_info: |[{"id": "54342925-eaad-4a61-b9ce-ec421de6973e", "address": "fa:16:3e:22:77:1d", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap54342925-ea", "ovs_interfaceid": "54342925-eaad-4a61-b9ce-ec421de6973e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 549.915543] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:77:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54342925-eaad-4a61-b9ce-ec421de6973e', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 549.927800] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Creating folder: Project (3b897f2e1e1a48a1995901829a999601). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 549.932867] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1ab7cc0-f46e-440f-9466-3e01cceb0598 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.935848] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Releasing lock "refresh_cache-4144689d-05a1-4e7f-b159-75cbaef82333" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.936594] env[62507]: DEBUG nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Instance network_info: |[{"id": "8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90", "address": "fa:16:3e:ab:6c:78", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff7ea11-9a", "ovs_interfaceid": "8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 549.940115] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:6c:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 549.949645] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Creating folder: Project (934970a68f7547b7bad6aa33c0ea2fee). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 549.949955] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24111015-252d-45a9-ab95-a79fd5768a8e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.959958] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Created folder: Project (3b897f2e1e1a48a1995901829a999601) in parent group-v497991. [ 549.959958] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Creating folder: Instances. Parent ref: group-v497998. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 549.961125] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb1d3b92-a4a1-4f27-9047-07bfe65b87a7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.963640] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Created folder: Project (934970a68f7547b7bad6aa33c0ea2fee) in parent group-v497991. [ 549.963885] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Creating folder: Instances. Parent ref: group-v497999. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 549.964819] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7dc3481-40cd-4399-8f3f-cc3d717f6ad4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.976111] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Created folder: Instances in parent group-v497999. 
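Editor's note: the oslo_concurrency.lockutils records that recur throughout this trace follow one fixed shape: Acquiring lock X by Y, then Lock X acquired by Y :: waited Ns, then Lock X "released" by Y :: held Ns. The sketch below reproduces that accounting with a plain threading.Lock from the standard library; it is an illustrative stand-in for oslo.concurrency's lock machinery, not the real implementation, and the function and variable names here (timed_lock, _locks) are invented for the example.

import contextlib
import threading
import time

_locks = {}
_registry_guard = threading.Lock()

@contextlib.contextmanager
def timed_lock(name, owner):
    # One named lock per key, mirroring a per-name lock registry.
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print('Acquiring lock "%s" by "%s"' % (name, owner))
    t0 = time.monotonic()
    with lock:
        print('Lock "%s" acquired by "%s" :: waited %.3fs'
              % (name, owner, time.monotonic() - t0))
        t1 = time.monotonic()
        try:
            yield
        finally:
            print('Lock "%s" "released" by "%s" :: held %.3fs'
                  % (name, owner, time.monotonic() - t1))

if __name__ == '__main__':
    with timed_lock('compute_resources', 'ResourceTracker.instance_claim'):
        time.sleep(0.05)  # stand-in for claiming resources on the node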
[ 549.976186] env[62507]: DEBUG oslo.service.loopingcall [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 549.976400] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Created folder: Instances in parent group-v497998. [ 549.976610] env[62507]: DEBUG oslo.service.loopingcall [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 549.977107] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 549.977107] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 549.977827] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-288e5be6-e71d-49da-8b7a-7bc551dbcfd6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.002246] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ad93e7ed-c2ab-4145-bef1-ce319dd09071 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.036431] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 550.036431] env[62507]: value = "task-2459901" [ 550.036431] env[62507]: _type = "Task" [ 550.036431] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.036431] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 550.036431] env[62507]: value = "task-2459902" [ 550.036431] env[62507]: _type = "Task" [ 550.036431] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.051828] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459901, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.052041] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459902, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
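Editor's note: the wait_for_task/_poll_task records above show the polling pattern around vCenter tasks: CreateVM_Task is invoked, then the task is polled ("progress is 0%.") until it reaches a terminal state and a duration is reported ("completed successfully" below). A minimal standard-library sketch of that poll loop follows; FakeTask, its progress schedule, and the task id used are hypothetical stand-ins, not the oslo.vmware API.

import itertools
import time

class FakeTask:
    """Hypothetical stand-in for a vCenter task handle."""
    def __init__(self, task_id, name):
        self.task_id = task_id
        self.name = name
        self._progress = itertools.chain([0, 40, 80], itertools.repeat(100))

    def poll(self):
        # A real client would read task.info through the PropertyCollector.
        pct = next(self._progress)
        return {'progress': pct, 'state': 'success' if pct == 100 else 'running'}

def wait_for_task(task, interval=0.05):
    """Poll until the task completes; return its duration in seconds."""
    start = time.monotonic()
    while True:
        info = task.poll()
        print("Task: {'id': %s, 'name': %s} progress is %d%%."
              % (task.task_id, task.name, info['progress']))
        if info['state'] == 'success':
            duration = time.monotonic() - start
            print("Task: {'id': %s, 'name': %s, 'duration_secs': %.6f} "
                  "completed successfully." % (task.task_id, task.name, duration))
            return duration
        time.sleep(interval)

if __name__ == '__main__':
    wait_for_task(FakeTask('task-0000000', 'CreateVM_Task'))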
[ 550.370144] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Acquiring lock "c31b20a2-11aa-4d64-a8c6-2d8f889f1560" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.370496] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Lock "c31b20a2-11aa-4d64-a8c6-2d8f889f1560" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.382860] env[62507]: DEBUG nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 550.450901] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.451265] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.452779] env[62507]: INFO nova.compute.claims [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 550.488972] env[62507]: DEBUG nova.compute.manager [req-534b0e12-f7f9-4ad3-a574-99dbe6a7db4c req-48002d02-0588-4d88-85dc-6530805ab31b service nova] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Received event network-vif-plugged-54342925-eaad-4a61-b9ce-ec421de6973e {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 550.488972] env[62507]: DEBUG oslo_concurrency.lockutils [req-534b0e12-f7f9-4ad3-a574-99dbe6a7db4c req-48002d02-0588-4d88-85dc-6530805ab31b service nova] Acquiring lock "598b1fd3-d762-4625-9660-ccf76af2394c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.488972] env[62507]: DEBUG oslo_concurrency.lockutils [req-534b0e12-f7f9-4ad3-a574-99dbe6a7db4c
req-48002d02-0588-4d88-85dc-6530805ab31b service nova] Lock "598b1fd3-d762-4625-9660-ccf76af2394c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.489157] env[62507]: DEBUG oslo_concurrency.lockutils [req-534b0e12-f7f9-4ad3-a574-99dbe6a7db4c req-48002d02-0588-4d88-85dc-6530805ab31b service nova] Lock "598b1fd3-d762-4625-9660-ccf76af2394c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.489194] env[62507]: DEBUG nova.compute.manager [req-534b0e12-f7f9-4ad3-a574-99dbe6a7db4c req-48002d02-0588-4d88-85dc-6530805ab31b service nova] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] No waiting events found dispatching network-vif-plugged-54342925-eaad-4a61-b9ce-ec421de6973e {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 550.489436] env[62507]: WARNING nova.compute.manager [req-534b0e12-f7f9-4ad3-a574-99dbe6a7db4c req-48002d02-0588-4d88-85dc-6530805ab31b service nova] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Received unexpected event network-vif-plugged-54342925-eaad-4a61-b9ce-ec421de6973e for instance with vm_state building and task_state spawning. [ 550.565351] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459901, 'name': CreateVM_Task, 'duration_secs': 0.381632} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.565748] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459902, 'name': CreateVM_Task, 'duration_secs': 0.399235} completed successfully.
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.566563] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 550.566563] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 550.566924] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.567146] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.567586] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 550.568817] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.572289] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1273f60d-5469-49f1-b826-6f9e048c95b1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.580576] env[62507]: DEBUG oslo_vmware.api [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Waiting for the task: (returnval){ [ 550.580576] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52b17136-d500-f18c-9822-920b1daa093d" [ 550.580576] env[62507]: _type = "Task" [ 550.580576] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.590687] env[62507]: DEBUG oslo_vmware.api [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52b17136-d500-f18c-9822-920b1daa093d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.679224] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7709b31c-a227-4249-aef7-b5b230f5c236 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.689787] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc100392-e4d5-4549-826d-b5adbb0465d4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.726433] env[62507]: DEBUG nova.network.neutron [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Successfully created port: 014bf186-82a0-4e03-aab0-7e430afa2ffb {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 550.730301] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e396792-628e-4967-b700-e2f983ee04d2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.738242] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8651cb63-a755-4e88-93fe-7651188f659b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.753209] env[62507]: DEBUG nova.compute.provider_tree [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 550.771962] env[62507]: DEBUG nova.scheduler.client.report [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 550.790103] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.338s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.790306] env[62507]: DEBUG nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Start building networks asynchronously for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 550.853325] env[62507]: DEBUG nova.compute.utils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 550.856607] env[62507]: DEBUG nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 550.856607] env[62507]: DEBUG nova.network.neutron [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 550.873027] env[62507]: DEBUG nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 550.979214] env[62507]: DEBUG nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 551.013433] env[62507]: DEBUG nova.virt.hardware [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 551.014325] env[62507]: DEBUG nova.virt.hardware [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 551.014325] env[62507]: DEBUG nova.virt.hardware [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 551.014394] env[62507]: DEBUG nova.virt.hardware [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 551.014938] env[62507]: DEBUG nova.virt.hardware [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 551.015578] env[62507]: DEBUG nova.virt.hardware [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 551.015578] env[62507]: DEBUG nova.virt.hardware [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 551.015578] env[62507]: DEBUG nova.virt.hardware [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 551.017173] env[62507]: DEBUG nova.virt.hardware [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 551.017389] env[62507]: DEBUG nova.virt.hardware [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 551.017580] env[62507]: DEBUG nova.virt.hardware [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 551.018977] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8042f15f-d5d0-4018-b81f-59f2261b92b5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.030959] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfab1da7-7bc6-47e4-ba2b-a551d27cb306 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.090592] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.090861] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 551.091088] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.091306] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.091613] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 
tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 551.091865] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b79c669-e876-4d4d-a1d2-1440a4f124b9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.096502] env[62507]: DEBUG oslo_vmware.api [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Waiting for the task: (returnval){ [ 551.096502] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52629737-411b-ec8f-370a-695cff2d6c7a" [ 551.096502] env[62507]: _type = "Task" [ 551.096502] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.104911] env[62507]: DEBUG oslo_vmware.api [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52629737-411b-ec8f-370a-695cff2d6c7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.269886] env[62507]: DEBUG nova.policy [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '44522ae9551f4892a15be8b3071e870c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '791470ac011b45f9a4e294deedc3bb02', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 551.610343] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.610343] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 551.613438] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.879641] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Acquiring lock "3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.879868] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Lock "3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.897871] env[62507]: DEBUG nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 551.978091] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.978452] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.980151] env[62507]: INFO nova.compute.claims [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 552.189552] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0fdc8e-1f8d-4dd5-a97b-4e115bf497a4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.197647] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8064b9-8cb4-40fa-b6a2-447935076f46 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.230422] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e08557-9f90-404e-a4a7-f7b984624ad0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.237796] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc796530-5882-4405-bcad-2a0d109d00b5 {{(pid=62507) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.251149] env[62507]: DEBUG nova.compute.provider_tree [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.264219] env[62507]: DEBUG nova.scheduler.client.report [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 552.281396] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.303s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.281895] env[62507]: DEBUG nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 552.318099] env[62507]: DEBUG nova.compute.utils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 552.319384] env[62507]: DEBUG nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 552.319562] env[62507]: DEBUG nova.network.neutron [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 552.344529] env[62507]: DEBUG nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Start building block device mappings for instance. 
[ 552.344529] env[62507]: DEBUG nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 552.422819] env[62507]: DEBUG nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 552.454671] env[62507]: DEBUG nova.compute.manager [req-6de2411e-b386-4042-89ef-5e8f3f8e1b09 req-64db281d-82b5-4d69-81e7-04041f1a67c8 service nova] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Received event network-changed-69f0c988-a2d2-4296-92f3-d98e73fec759 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 552.454886] env[62507]: DEBUG nova.compute.manager [req-6de2411e-b386-4042-89ef-5e8f3f8e1b09 req-64db281d-82b5-4d69-81e7-04041f1a67c8 service nova] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Refreshing instance network info cache due to event network-changed-69f0c988-a2d2-4296-92f3-d98e73fec759. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 552.455099] env[62507]: DEBUG oslo_concurrency.lockutils [req-6de2411e-b386-4042-89ef-5e8f3f8e1b09 req-64db281d-82b5-4d69-81e7-04041f1a67c8 service nova] Acquiring lock "refresh_cache-fa608f4a-47e6-4904-af65-a82c107af979" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.455269] env[62507]: DEBUG oslo_concurrency.lockutils [req-6de2411e-b386-4042-89ef-5e8f3f8e1b09 req-64db281d-82b5-4d69-81e7-04041f1a67c8 service nova] Acquired lock "refresh_cache-fa608f4a-47e6-4904-af65-a82c107af979" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.455414] env[62507]: DEBUG nova.network.neutron [req-6de2411e-b386-4042-89ef-5e8f3f8e1b09 req-64db281d-82b5-4d69-81e7-04041f1a67c8 service nova] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Refreshing network info cache for port 69f0c988-a2d2-4296-92f3-d98e73fec759 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 552.458669] env[62507]: DEBUG nova.virt.hardware [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 552.459032] env[62507]: DEBUG nova.virt.hardware [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints
/opt/stack/nova/nova/virt/hardware.py:348}} [ 552.459222] env[62507]: DEBUG nova.virt.hardware [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 552.459851] env[62507]: DEBUG nova.virt.hardware [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 552.459851] env[62507]: DEBUG nova.virt.hardware [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 552.459851] env[62507]: DEBUG nova.virt.hardware [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 552.460120] env[62507]: DEBUG nova.virt.hardware [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 552.460120] env[62507]: DEBUG nova.virt.hardware [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 552.461068] env[62507]: DEBUG nova.virt.hardware [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 552.461068] env[62507]: DEBUG nova.virt.hardware [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 552.461068] env[62507]: DEBUG nova.virt.hardware [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 552.462932] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510a78f6-13c8-463d-b9ab-f67d9dd60aaf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
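
The hardware.py entries above walk the vCPU-topology search: with no flavor or image constraints (limits 0:0:0, maxima 65536 per dimension), a 1-vCPU guest admits exactly one (sockets, cores, threads) layout. A simplified sketch of that enumeration, under the naive reading that any factorization of the vCPU count within the maxima is a candidate; Nova's actual _get_possible_cpu_topologies carries more ordering and preference logic than this:

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Yield every (sockets, cores, threads) triple whose product is
        # exactly `vcpus` and which respects the per-dimension maxima.
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield VirtCPUTopology(sockets, cores, threads)

    print(list(possible_topologies(1)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)], one candidate,
    # matching "Got 1 possible topologies" in the log above.
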
[ 552.466352] env[62507]: DEBUG nova.network.neutron [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Successfully updated port: 014bf186-82a0-4e03-aab0-7e430afa2ffb {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 552.473844] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa30ac9-72a9-474a-b300-cea04c76b304 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.496526] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Acquiring lock "refresh_cache-1b19cecd-2a04-4077-9758-9947a3bcb4c2" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.496659] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Acquired lock "refresh_cache-1b19cecd-2a04-4077-9758-9947a3bcb4c2" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.497376] env[62507]: DEBUG nova.network.neutron [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 552.634925] env[62507]: DEBUG nova.policy [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '096f153715b3452687d7beba11760482', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e0a6e2f8db448ccb09402b54dccda3b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 552.669952] env[62507]: DEBUG nova.network.neutron [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Instance cache missing network info.
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.571107] env[62507]: DEBUG nova.network.neutron [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Successfully created port: 97f73950-75bd-463b-ac09-955217bff17c {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 553.669518] env[62507]: DEBUG nova.network.neutron [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Updating instance_info_cache with network_info: [{"id": "014bf186-82a0-4e03-aab0-7e430afa2ffb", "address": "fa:16:3e:e4:2e:56", "network": {"id": "9834e543-838b-40de-a48a-13d68d8feeb9", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1011786626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc2e7a666c9c4a7f948e5c43d385685e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap014bf186-82", "ovs_interfaceid": "014bf186-82a0-4e03-aab0-7e430afa2ffb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.690607] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Releasing lock "refresh_cache-1b19cecd-2a04-4077-9758-9947a3bcb4c2" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.690918] env[62507]: DEBUG nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Instance network_info: |[{"id": "014bf186-82a0-4e03-aab0-7e430afa2ffb", "address": "fa:16:3e:e4:2e:56", "network": {"id": "9834e543-838b-40de-a48a-13d68d8feeb9", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1011786626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc2e7a666c9c4a7f948e5c43d385685e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap014bf186-82", "ovs_interfaceid": "014bf186-82a0-4e03-aab0-7e430afa2ffb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 553.691446] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:2e:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '014bf186-82a0-4e03-aab0-7e430afa2ffb', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 553.699999] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Creating folder: Project (bc2e7a666c9c4a7f948e5c43d385685e). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 553.700842] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35d8ab61-af05-4a0f-b393-d4c689b3afa2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.713642] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Created folder: Project (bc2e7a666c9c4a7f948e5c43d385685e) in parent group-v497991. [ 553.713845] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Creating folder: Instances. Parent ref: group-v498004. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 553.714103] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5850a103-acef-46e0-9968-093c9c7bb7fe {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.725102] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Created folder: Instances in parent group-v498004. [ 553.725427] env[62507]: DEBUG oslo.service.loopingcall [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.725626] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 553.726089] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-594e3459-c7a2-4df9-b28b-146b77bebb88 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.751163] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 553.751163] env[62507]: value = "task-2459905" [ 553.751163] env[62507]: _type = "Task" [ 553.751163] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.762232] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459905, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.944685] env[62507]: DEBUG nova.network.neutron [req-6de2411e-b386-4042-89ef-5e8f3f8e1b09 req-64db281d-82b5-4d69-81e7-04041f1a67c8 service nova] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Updated VIF entry in instance network info cache for port 69f0c988-a2d2-4296-92f3-d98e73fec759. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 553.944852] env[62507]: DEBUG nova.network.neutron [req-6de2411e-b386-4042-89ef-5e8f3f8e1b09 req-64db281d-82b5-4d69-81e7-04041f1a67c8 service nova] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Updating instance_info_cache with network_info: [{"id": "69f0c988-a2d2-4296-92f3-d98e73fec759", "address": "fa:16:3e:eb:ba:ec", "network": {"id": "2968634f-4390-45d9-b1df-0644cd1244b0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-200178405-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a622b02b7b345b193448f79ac976b06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69f0c988-a2", "ovs_interfaceid": "69f0c988-a2d2-4296-92f3-d98e73fec759", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.961747] env[62507]: DEBUG oslo_concurrency.lockutils [req-6de2411e-b386-4042-89ef-5e8f3f8e1b09 req-64db281d-82b5-4d69-81e7-04041f1a67c8 service nova] Releasing lock "refresh_cache-fa608f4a-47e6-4904-af65-a82c107af979" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.043019] env[62507]: DEBUG nova.compute.manager [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] [instance: 
4144689d-05a1-4e7f-b159-75cbaef82333] Received event network-vif-plugged-8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 554.048125] env[62507]: DEBUG oslo_concurrency.lockutils [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] Acquiring lock "4144689d-05a1-4e7f-b159-75cbaef82333-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.048125] env[62507]: DEBUG oslo_concurrency.lockutils [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] Lock "4144689d-05a1-4e7f-b159-75cbaef82333-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.002s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.048125] env[62507]: DEBUG oslo_concurrency.lockutils [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] Lock "4144689d-05a1-4e7f-b159-75cbaef82333-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.048125] env[62507]: DEBUG nova.compute.manager [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] No waiting events found dispatching network-vif-plugged-8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 554.048293] env[62507]: WARNING nova.compute.manager [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Received unexpected event network-vif-plugged-8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90 for instance with vm_state building and task_state spawning. [ 554.048293] env[62507]: DEBUG nova.compute.manager [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Received event network-changed-54342925-eaad-4a61-b9ce-ec421de6973e {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
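
The "-events" lock, the "No waiting events found" line and the WARNING that follows trace Nova's external-event handshake: Neutron reports network-vif-plugged, and the compute manager pops whatever waiter was registered for that (instance, event) pair; when the instance is still building and nothing is registered yet, the event is treated as unexpected. A toy model of that bookkeeping, assuming a simplified registry of threading.Event handles (an illustration, not Nova's InstanceEvents class):

    import threading

    _events_lock = threading.Lock()  # plays the role of the "<uuid>-events" lock
    _waiters = {}                    # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(instance_uuid, event_name):
        # A spawning thread registers interest before triggering the
        # operation that will eventually raise the event.
        with _events_lock:
            handle = threading.Event()
            _waiters[(instance_uuid, event_name)] = handle
            return handle

    def pop_instance_event(instance_uuid, event_name):
        # The external-event path pops the waiter, if any, under the lock.
        with _events_lock:
            return _waiters.pop((instance_uuid, event_name), None)

    handle = pop_instance_event(
        "4144689d-05a1-4e7f-b159-75cbaef82333",
        "network-vif-plugged-8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90")
    if handle is None:
        # Nobody was waiting: the WARNING path seen in the log above.
        print("no waiting events found; event is unexpected")
    else:
        handle.set()  # wake the registered waiter
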
[ 554.048293] env[62507]: DEBUG nova.compute.manager [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Refreshing instance network info cache due to event network-changed-54342925-eaad-4a61-b9ce-ec421de6973e. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 554.048293] env[62507]: DEBUG oslo_concurrency.lockutils [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] Acquiring lock "refresh_cache-598b1fd3-d762-4625-9660-ccf76af2394c" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.048293] env[62507]: DEBUG oslo_concurrency.lockutils [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] Acquired lock "refresh_cache-598b1fd3-d762-4625-9660-ccf76af2394c" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.048452] env[62507]: DEBUG nova.network.neutron [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Refreshing network info cache for port 54342925-eaad-4a61-b9ce-ec421de6973e {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 554.261996] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459905, 'name': CreateVM_Task, 'duration_secs': 0.335419} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.262175] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 554.262865] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.263035] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.263397] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 554.263647] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f4ac9be-3fd9-43a1-b60d-ffbbd7fd7667 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
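
The task-2459905 entries above show oslo.vmware's task protocol: a *_Task SOAP method returns a task reference immediately, and the session then polls it (the "progress is 0%" lines) until it reports completion ("completed successfully" with duration_secs). A hedged sketch of that call-then-wait shape, assuming an already-constructed oslo.vmware VMwareAPISession named session and pre-resolved managed-object references; folder, config_spec and res_pool are placeholders, not values from this log:

    def create_vm(session, folder, config_spec, res_pool):
        # invoke_api issues the SOAP call; CreateVM_Task returns a task
        # managed-object reference rather than the finished VM.
        task = session.invoke_api(session.vim, "CreateVM_Task", folder,
                                  config=config_spec, pool=res_pool)
        # wait_for_task polls the task's server-side state and returns
        # the completed task info; for CreateVM_Task its result is the
        # reference to the newly created VM.
        task_info = session.wait_for_task(task)
        return task_info.result
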
[ 554.271600] env[62507]: DEBUG oslo_vmware.api [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Waiting for the task: (returnval){ [ 554.271600] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f57b1b-fb1f-2f31-b401-3367eed47344" [ 554.271600] env[62507]: _type = "Task" [ 554.271600] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.281690] env[62507]: DEBUG oslo_vmware.api [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f57b1b-fb1f-2f31-b401-3367eed47344, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.794873] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.795183] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 554.795412] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.176705] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.176705] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.176935] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 555.179369] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 555.205314] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 555.205455] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Skipping network cache update for instance because it is Building.
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 555.205576] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 555.205667] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 555.206485] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 555.206485] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 555.206485] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 555.206485] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 555.207058] env[62507]: DEBUG nova.network.neutron [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Successfully created port: 93388ce7-b185-439d-b445-b2d1fc781568 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 555.209662] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.209922] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.210858] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.210858] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.210987] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.211173] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.211271] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 555.211844] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 555.231920] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.232117] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.232312] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.232467] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 555.234071] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbbb2e5-33e8-43c3-b31d-b06f30b9db63 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.244722] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4d1905-cb03-4b02-a754-ad3a6e79f109 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.262551] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e73456-066e-4c34-a8fc-fc2a423a945b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.271907] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9bcec0-e1a6-4885-8e48-392d501910e0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.308062] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181159MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 555.308370] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.308370] 
env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.398441] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 55592fa8-93f7-49a2-8022-f4d0825c705b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 555.399972] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fa608f4a-47e6-4904-af65-a82c107af979 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 555.399972] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 598b1fd3-d762-4625-9660-ccf76af2394c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 555.399972] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4144689d-05a1-4e7f-b159-75cbaef82333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 555.399972] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1b19cecd-2a04-4077-9758-9947a3bcb4c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 555.400715] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c31b20a2-11aa-4d64-a8c6-2d8f889f1560 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 555.400715] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 555.400715] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 555.400715] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 555.545833] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46ffa76-921f-4613-9302-9009e69c547a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.553817] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d09cc17d-8be8-4816-80c2-325f91dfeb72 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.589256] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f90956-febe-4a71-ac57-455cd81e77cb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.596776] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc21aa8b-6b84-47ad-8d0f-d9bb15983d47 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.611602] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.624125] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 555.641652] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 555.641726] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.333s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.906452] env[62507]: DEBUG nova.network.neutron 
[ 555.906452] env[62507]: DEBUG nova.network.neutron [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Updated VIF entry in instance network info cache for port 54342925-eaad-4a61-b9ce-ec421de6973e. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 555.911587] env[62507]: DEBUG nova.network.neutron [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Updating instance_info_cache with network_info: [{"id": "54342925-eaad-4a61-b9ce-ec421de6973e", "address": "fa:16:3e:22:77:1d", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54342925-ea", "ovs_interfaceid": "54342925-eaad-4a61-b9ce-ec421de6973e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.921947] env[62507]: DEBUG oslo_concurrency.lockutils [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] Releasing lock "refresh_cache-598b1fd3-d762-4625-9660-ccf76af2394c" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.925447] env[62507]: DEBUG nova.compute.manager [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Received event network-changed-8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 555.925447] env[62507]: DEBUG nova.compute.manager [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Refreshing instance network info cache due to event network-changed-8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90.
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 555.925447] env[62507]: DEBUG oslo_concurrency.lockutils [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] Acquiring lock "refresh_cache-4144689d-05a1-4e7f-b159-75cbaef82333" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.925447] env[62507]: DEBUG oslo_concurrency.lockutils [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] Acquired lock "refresh_cache-4144689d-05a1-4e7f-b159-75cbaef82333" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.925447] env[62507]: DEBUG nova.network.neutron [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Refreshing network info cache for port 8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 556.618836] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.621411] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.643391] env[62507]: DEBUG nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Starting instance...
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 556.750504] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.750504] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.752633] env[62507]: INFO nova.compute.claims [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 557.025615] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983c23fe-b0d9-436e-8eed-9fd88fb70f44 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.036349] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22860a60-5405-47cb-8164-13733e106f6a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.068870] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49dc8668-5f20-47d5-a547-ca3324372d41 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.076644] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f77b6a8-1fbf-4633-9ad9-55def922101b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.090926] env[62507]: DEBUG nova.compute.provider_tree [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 557.100144] env[62507]: DEBUG nova.scheduler.client.report [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 557.118313] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa 
tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.368s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.118808] env[62507]: DEBUG nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 557.157329] env[62507]: DEBUG nova.compute.utils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 557.162109] env[62507]: DEBUG nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 557.162256] env[62507]: DEBUG nova.network.neutron [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 557.168924] env[62507]: DEBUG nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 557.249347] env[62507]: DEBUG nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 557.279179] env[62507]: DEBUG nova.virt.hardware [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 557.279577] env[62507]: DEBUG nova.virt.hardware [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 557.279726] env[62507]: DEBUG nova.virt.hardware [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 557.279912] env[62507]: DEBUG nova.virt.hardware [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 557.280126] env[62507]: DEBUG nova.virt.hardware [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 557.280320] env[62507]: DEBUG nova.virt.hardware [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 557.280540] env[62507]: DEBUG nova.virt.hardware [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 557.280702] env[62507]: DEBUG nova.virt.hardware [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 557.280871] env[62507]: DEBUG nova.virt.hardware [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa 
tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 557.281052] env[62507]: DEBUG nova.virt.hardware [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 557.281299] env[62507]: DEBUG nova.virt.hardware [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 557.282124] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5663c7cf-096f-48f0-9535-ce7e26521518 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.292359] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66daa627-5d9c-4433-9189-4843a4aac761 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.685164] env[62507]: DEBUG nova.policy [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6f5fea623c94f7b816f42501eeb1db2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6b508b5886b148c0a8c913a053d839bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 557.832426] env[62507]: DEBUG nova.network.neutron [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Updated VIF entry in instance network info cache for port 8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90. 
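[annotation] The nova.virt.hardware lines above walk a fixed funnel: flavor and image set no limits or preferences (all 0:0:0), so the 65536 defaults apply, candidate topologies are enumerated for 1 vCPU, and [1:1:1] is the only survivor. A simplified sketch of that enumeration under the behaviour the log shows; Nova's real logic in nova/virt/hardware.py additionally handles preferences and sorting that this omits:

    # Simplified sketch of the topology funnel logged above: enumerate
    # (sockets, cores, threads) triples whose product equals the vCPU count,
    # capped by the limits (65536 each when flavor/image set nothing).
    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1),
        ):
            if sockets * cores * threads == vcpus:
                yield (sockets, cores, threads)

    print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the log
    print(list(possible_topologies(2)))  # [(1, 1, 2), (1, 2, 1), (2, 1, 1)]

For the m1.nano flavor (vcpus=1) the choice is trivial, which is why the log reports exactly one possible topology.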
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 557.832809] env[62507]: DEBUG nova.network.neutron [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Updating instance_info_cache with network_info: [{"id": "8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90", "address": "fa:16:3e:ab:6c:78", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff7ea11-9a", "ovs_interfaceid": "8ff7ea11-9ac5-47d5-b9a4-788b52ad7d90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.854364] env[62507]: DEBUG oslo_concurrency.lockutils [req-e5837b5c-7417-48d6-82a8-981f0658e83f req-da455dcb-d722-429c-8e56-e6ff2d123605 service nova] Releasing lock "refresh_cache-4144689d-05a1-4e7f-b159-75cbaef82333" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.012662] env[62507]: DEBUG nova.compute.manager [req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Received event network-vif-plugged-014bf186-82a0-4e03-aab0-7e430afa2ffb {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 558.015940] env[62507]: DEBUG oslo_concurrency.lockutils [req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] Acquiring lock "1b19cecd-2a04-4077-9758-9947a3bcb4c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.015940] env[62507]: DEBUG oslo_concurrency.lockutils [req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] Lock "1b19cecd-2a04-4077-9758-9947a3bcb4c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.015940] env[62507]: DEBUG oslo_concurrency.lockutils [req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] Lock "1b19cecd-2a04-4077-9758-9947a3bcb4c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.015940] env[62507]: DEBUG nova.compute.manager 
[req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] No waiting events found dispatching network-vif-plugged-014bf186-82a0-4e03-aab0-7e430afa2ffb {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 558.016645] env[62507]: WARNING nova.compute.manager [req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Received unexpected event network-vif-plugged-014bf186-82a0-4e03-aab0-7e430afa2ffb for instance with vm_state building and task_state spawning. [ 558.016645] env[62507]: DEBUG nova.compute.manager [req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Received event network-changed-014bf186-82a0-4e03-aab0-7e430afa2ffb {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 558.016645] env[62507]: DEBUG nova.compute.manager [req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Refreshing instance network info cache due to event network-changed-014bf186-82a0-4e03-aab0-7e430afa2ffb. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 558.016645] env[62507]: DEBUG oslo_concurrency.lockutils [req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] Acquiring lock "refresh_cache-1b19cecd-2a04-4077-9758-9947a3bcb4c2" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.016645] env[62507]: DEBUG oslo_concurrency.lockutils [req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] Acquired lock "refresh_cache-1b19cecd-2a04-4077-9758-9947a3bcb4c2" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.016793] env[62507]: DEBUG nova.network.neutron [req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Refreshing network info cache for port 014bf186-82a0-4e03-aab0-7e430afa2ffb {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 558.137232] env[62507]: DEBUG nova.network.neutron [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Successfully updated port: 97f73950-75bd-463b-ac09-955217bff17c {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 558.155761] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Acquiring lock "refresh_cache-c31b20a2-11aa-4d64-a8c6-2d8f889f1560" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.155761] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Acquired lock "refresh_cache-c31b20a2-11aa-4d64-a8c6-2d8f889f1560" {{(pid=62507) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.155761] env[62507]: DEBUG nova.network.neutron [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 558.331010] env[62507]: DEBUG nova.network.neutron [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 559.238732] env[62507]: DEBUG nova.network.neutron [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Successfully updated port: 93388ce7-b185-439d-b445-b2d1fc781568 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 559.256171] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Acquiring lock "refresh_cache-3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.256171] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Acquired lock "refresh_cache-3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.256629] env[62507]: DEBUG nova.network.neutron [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 559.441681] env[62507]: DEBUG nova.network.neutron [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Updating instance_info_cache with network_info: [{"id": "97f73950-75bd-463b-ac09-955217bff17c", "address": "fa:16:3e:e2:56:c7", "network": {"id": "93658065-34b2-473a-a362-753348b13359", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1799724645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "791470ac011b45f9a4e294deedc3bb02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d47d5e1d-e66d-4f2c-83e6-d5e78c2b767d", "external-id": 
"nsx-vlan-transportzone-109", "segmentation_id": 109, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97f73950-75", "ovs_interfaceid": "97f73950-75bd-463b-ac09-955217bff17c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.456684] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Releasing lock "refresh_cache-c31b20a2-11aa-4d64-a8c6-2d8f889f1560" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.456684] env[62507]: DEBUG nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Instance network_info: |[{"id": "97f73950-75bd-463b-ac09-955217bff17c", "address": "fa:16:3e:e2:56:c7", "network": {"id": "93658065-34b2-473a-a362-753348b13359", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1799724645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "791470ac011b45f9a4e294deedc3bb02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d47d5e1d-e66d-4f2c-83e6-d5e78c2b767d", "external-id": "nsx-vlan-transportzone-109", "segmentation_id": 109, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97f73950-75", "ovs_interfaceid": "97f73950-75bd-463b-ac09-955217bff17c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 559.456833] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:56:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd47d5e1d-e66d-4f2c-83e6-d5e78c2b767d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '97f73950-75bd-463b-ac09-955217bff17c', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 559.464852] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Creating folder: Project (791470ac011b45f9a4e294deedc3bb02). Parent ref: group-v497991. 
{{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 559.466783] env[62507]: DEBUG nova.network.neutron [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 559.467997] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-130ba027-cbb9-46ba-afd3-1ba23e4ba785 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.481029] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Created folder: Project (791470ac011b45f9a4e294deedc3bb02) in parent group-v497991. [ 559.485204] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Creating folder: Instances. Parent ref: group-v498007. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 559.485820] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3aa67f63-77c5-4a5d-9f29-7f3793679e32 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.496948] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Created folder: Instances in parent group-v498007. [ 559.497730] env[62507]: DEBUG oslo.service.loopingcall [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 559.497730] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 559.497730] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96dbed11-5143-44e3-84dd-e965fb18ad0c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.521525] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 559.521525] env[62507]: value = "task-2459908" [ 559.521525] env[62507]: _type = "Task" [ 559.521525] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.529358] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459908, 'name': CreateVM_Task} progress is 0%. 
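[annotation] Folder.CreateFolder and Folder.CreateVM_Task above return vCenter task objects, and oslo.vmware's wait_for_task polls them, which is what produces the "progress is 0%" ... "completed successfully" pairs. A minimal sketch of that call pattern against the public oslo.vmware API; host and credentials are placeholders, and the VM config spec is elided, so the invoke/wait calls are left commented:

    # Sketch of the oslo.vmware task pattern behind the CreateVM_Task lines:
    # invoke the *_Task SOAP method, then block in wait_for_task, which polls
    # and logs progress until the task reaches the "success" state.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        "vc.example.test",              # placeholder vCenter host
        "administrator@vsphere.local",  # placeholder user
        "secret",                       # placeholder password
        api_retry_count=3,
        task_poll_interval=0.5,
        create_session=False,           # skip login for this offline sketch
    )

    # folder_ref / config_spec / pool_ref would come from earlier
    # PropertyCollector lookups like those in the log; elided here.
    # task = session.invoke_api(session.vim, "CreateVM_Task", folder_ref,
    #                           config=config_spec, pool=pool_ref)
    # task_info = session.wait_for_task(task)  # blocks, logging progress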
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.601346] env[62507]: DEBUG nova.compute.manager [req-8ddf5e6e-0d2f-4885-b855-b79ef4487104 req-6ef87098-ab06-4d61-a7a4-5176a3e18534 service nova] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Received event network-vif-plugged-97f73950-75bd-463b-ac09-955217bff17c {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 559.601609] env[62507]: DEBUG oslo_concurrency.lockutils [req-8ddf5e6e-0d2f-4885-b855-b79ef4487104 req-6ef87098-ab06-4d61-a7a4-5176a3e18534 service nova] Acquiring lock "c31b20a2-11aa-4d64-a8c6-2d8f889f1560-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.601878] env[62507]: DEBUG oslo_concurrency.lockutils [req-8ddf5e6e-0d2f-4885-b855-b79ef4487104 req-6ef87098-ab06-4d61-a7a4-5176a3e18534 service nova] Lock "c31b20a2-11aa-4d64-a8c6-2d8f889f1560-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.602012] env[62507]: DEBUG oslo_concurrency.lockutils [req-8ddf5e6e-0d2f-4885-b855-b79ef4487104 req-6ef87098-ab06-4d61-a7a4-5176a3e18534 service nova] Lock "c31b20a2-11aa-4d64-a8c6-2d8f889f1560-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.602192] env[62507]: DEBUG nova.compute.manager [req-8ddf5e6e-0d2f-4885-b855-b79ef4487104 req-6ef87098-ab06-4d61-a7a4-5176a3e18534 service nova] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] No waiting events found dispatching network-vif-plugged-97f73950-75bd-463b-ac09-955217bff17c {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 559.602449] env[62507]: WARNING nova.compute.manager [req-8ddf5e6e-0d2f-4885-b855-b79ef4487104 req-6ef87098-ab06-4d61-a7a4-5176a3e18534 service nova] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Received unexpected event network-vif-plugged-97f73950-75bd-463b-ac09-955217bff17c for instance with vm_state building and task_state spawning. [ 559.690635] env[62507]: DEBUG nova.network.neutron [req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Updated VIF entry in instance network info cache for port 014bf186-82a0-4e03-aab0-7e430afa2ffb. 
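[annotation] The network-vif-plugged sequences above are Neutron calling back into Nova's external-events API: compute pops a matching waiting event if a spawn is blocked on it, and otherwise emits the "No waiting events found" / "Received unexpected event" pair, as here where the port went active before the driver registered a wait. A conceptual sketch of that pop-or-warn dispatch using plain threading primitives; this is an illustration, not Nova's InstanceEvents implementation:

    # Conceptual sketch of the pop-or-warn dispatch in the log: a spawner
    # registers a waiter keyed by (instance_uuid, event_name); the callback
    # pops and signals it, or warns when nobody is waiting yet.
    import threading

    _waiters = {}  # (instance_uuid, event_name) -> threading.Event
    _lock = threading.Lock()

    def prepare_for_event(instance_uuid, event_name):
        ev = threading.Event()
        with _lock:
            _waiters[(instance_uuid, event_name)] = ev
        return ev  # spawner later calls ev.wait(timeout=...)

    def dispatch_event(instance_uuid, event_name):
        with _lock:
            ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
            return
        ev.set()

    dispatch_event("1b19cecd-2a04-4077-9758-9947a3bcb4c2",
                   "network-vif-plugged-014bf186-82a0-4e03-aab0-7e430afa2ffb")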
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 559.691259] env[62507]: DEBUG nova.network.neutron [req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Updating instance_info_cache with network_info: [{"id": "014bf186-82a0-4e03-aab0-7e430afa2ffb", "address": "fa:16:3e:e4:2e:56", "network": {"id": "9834e543-838b-40de-a48a-13d68d8feeb9", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1011786626-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc2e7a666c9c4a7f948e5c43d385685e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap014bf186-82", "ovs_interfaceid": "014bf186-82a0-4e03-aab0-7e430afa2ffb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.719368] env[62507]: DEBUG oslo_concurrency.lockutils [req-1594e3cf-eac4-4e3d-878d-69fe3e62cea6 req-4034854a-3f66-4587-b649-226d3f8747ac service nova] Releasing lock "refresh_cache-1b19cecd-2a04-4077-9758-9947a3bcb4c2" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.035237] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459908, 'name': CreateVM_Task, 'duration_secs': 0.350677} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.035501] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 560.036105] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.036448] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.036812] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 560.037083] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-579b2a40-97a9-4832-b37f-1810e785eb0c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.045616] env[62507]: DEBUG oslo_vmware.api [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Waiting for the task: (returnval){ [ 560.045616] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5255517b-e70a-3468-285b-d553174c40d6" [ 560.045616] env[62507]: _type = "Task" [ 560.045616] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.055702] env[62507]: DEBUG oslo_vmware.api [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5255517b-e70a-3468-285b-d553174c40d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.154476] env[62507]: DEBUG nova.network.neutron [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Updating instance_info_cache with network_info: [{"id": "93388ce7-b185-439d-b445-b2d1fc781568", "address": "fa:16:3e:71:ee:08", "network": {"id": "29811ada-e03a-45fc-a799-22916e87c373", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1736393850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6e0a6e2f8db448ccb09402b54dccda3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93388ce7-b1", "ovs_interfaceid": "93388ce7-b185-439d-b445-b2d1fc781568", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.175107] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Releasing lock "refresh_cache-3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.175107] env[62507]: DEBUG nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Instance network_info: |[{"id": "93388ce7-b185-439d-b445-b2d1fc781568", "address": "fa:16:3e:71:ee:08", "network": {"id": "29811ada-e03a-45fc-a799-22916e87c373", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1736393850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6e0a6e2f8db448ccb09402b54dccda3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93388ce7-b1", "ovs_interfaceid": "93388ce7-b185-439d-b445-b2d1fc781568", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 560.175369] env[62507]: DEBUG 
nova.virt.vmwareapi.vmops [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:ee:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '357d2811-e990-4985-9f9e-b158d10d3699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93388ce7-b185-439d-b445-b2d1fc781568', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 560.186620] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Creating folder: Project (6e0a6e2f8db448ccb09402b54dccda3b). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 560.187537] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4cf79db7-47ef-4ab5-ad83-14925696daf6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.199299] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Created folder: Project (6e0a6e2f8db448ccb09402b54dccda3b) in parent group-v497991. [ 560.199492] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Creating folder: Instances. Parent ref: group-v498010. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 560.199725] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-29692ee6-3330-448c-9dd8-2495de54347e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.210241] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Created folder: Instances in parent group-v498010. [ 560.210528] env[62507]: DEBUG oslo.service.loopingcall [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 560.210719] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 560.212393] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0557ceda-46c2-4a9a-83aa-5d6e40514f30 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.236406] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 560.236406] env[62507]: value = "task-2459911" [ 560.236406] env[62507]: _type = "Task" [ 560.236406] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.251672] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459911, 'name': CreateVM_Task} progress is 6%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.272800] env[62507]: DEBUG nova.network.neutron [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Successfully created port: 00b16c90-c0c2-4c93-8f29-9b375934f297 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 560.562608] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.563256] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 560.564042] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.753708] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459911, 'name': CreateVM_Task, 'duration_secs': 0.35601} completed successfully. 
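[annotation] "Processing image 601dc712-..." followed by the lock on the cached .vmdk path is the vmwareapi image-cache gate: each build probes devstack-image-cache_base for the flattened VMDK (the SearchDatastore_Task calls above), and a per-image lock ensures only one request downloads it on a miss. A sketch of that lock-check-fetch shape; search_datastore and fetch_image are hypothetical placeholders for the SearchDatastore_Task probe and the Glance download plumbing:

    # Sketch of the fetch-if-missing gate around the image cache: serialize
    # on a per-image lock, probe the datastore path, and only the first
    # builder downloads. The two callables are placeholders, not Nova APIs.
    from oslo_concurrency import lockutils

    CACHE = "[datastore2] devstack-image-cache_base"

    def ensure_cached(image_id, search_datastore, fetch_image):
        vmdk_path = f"{CACHE}/{image_id}/{image_id}.vmdk"
        with lockutils.lock(vmdk_path):      # per-image serialization
            if not search_datastore(vmdk_path):   # SearchDatastore_Task probe
                fetch_image(image_id, vmdk_path)  # only on a cache miss
        return vmdk_path

    ensure_cached("601dc712-1d53-404c-b128-df5971f300a1",
                  lambda path: True,            # pretend cache hit
                  lambda image, path: None)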
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.753877] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 560.754610] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.754762] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.755275] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 560.755542] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb693dd0-1ac7-4ab4-880e-f382d832222c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.763644] env[62507]: DEBUG oslo_vmware.api [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Waiting for the task: (returnval){ [ 560.763644] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5217a334-4920-7d8f-2311-fc13bb54e6da" [ 560.763644] env[62507]: _type = "Task" [ 560.763644] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.773558] env[62507]: DEBUG oslo_vmware.api [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5217a334-4920-7d8f-2311-fc13bb54e6da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.275320] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.275797] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 561.276027] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.253526] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Acquiring lock "dc241495-c9b7-4f2f-895d-e25008cc738a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.253798] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Lock "dc241495-c9b7-4f2f-895d-e25008cc738a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.268334] env[62507]: DEBUG nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 562.341325] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.341325] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.341325] env[62507]: INFO nova.compute.claims [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 562.597341] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d7b565-e7b1-4c30-a719-2590205f1ff2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.606848] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b2efed-9710-47c2-8ee1-273daa4c6cb1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.658041] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a654d206-831a-4598-8ccd-70c0838a7f1d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.668188] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cf863d-e212-41e7-8725-7c090c52a59a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.697365] env[62507]: DEBUG nova.compute.provider_tree [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 562.715865] env[62507]: DEBUG nova.scheduler.client.report [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 562.770388] env[62507]: DEBUG oslo_concurrency.lockutils 
[None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.427s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.770388] env[62507]: DEBUG nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 562.847875] env[62507]: DEBUG nova.compute.utils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 562.852782] env[62507]: DEBUG nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 562.852782] env[62507]: DEBUG nova.network.neutron [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 562.883799] env[62507]: DEBUG nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 562.999350] env[62507]: DEBUG nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 563.039900] env[62507]: DEBUG nova.virt.hardware [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 563.040161] env[62507]: DEBUG nova.virt.hardware [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 563.040359] env[62507]: DEBUG nova.virt.hardware [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 563.040565] env[62507]: DEBUG nova.virt.hardware [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 563.040710] env[62507]: DEBUG nova.virt.hardware [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 563.040858] env[62507]: DEBUG nova.virt.hardware [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 563.041085] env[62507]: DEBUG nova.virt.hardware [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 563.041255] env[62507]: DEBUG nova.virt.hardware [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 563.041420] 
env[62507]: DEBUG nova.virt.hardware [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 563.041584] env[62507]: DEBUG nova.virt.hardware [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 563.041755] env[62507]: DEBUG nova.virt.hardware [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 563.044944] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3341ca-f9b3-4e16-a88c-5efed34e4df6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.053299] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4405087a-a0e3-4e4f-8d0a-a07f5ffc907b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.063797] env[62507]: DEBUG nova.network.neutron [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Successfully updated port: 00b16c90-c0c2-4c93-8f29-9b375934f297 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 563.084293] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "refresh_cache-58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.084451] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquired lock "refresh_cache-58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.084594] env[62507]: DEBUG nova.network.neutron [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 563.156723] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Acquiring lock "63f63029-d01a-4d55-9753-95b93b7155cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.156966] env[62507]: DEBUG 
oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Lock "63f63029-d01a-4d55-9753-95b93b7155cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.174203] env[62507]: DEBUG nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 563.260390] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.260664] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.262858] env[62507]: INFO nova.compute.claims [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 563.409682] env[62507]: DEBUG nova.policy [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e6f911a9e13475ba096b9aaff360edf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba54372059444fc4ae83dc3df0c8023b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 563.461796] env[62507]: DEBUG nova.network.neutron [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Instance cache missing network info. 
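[annotation] The failed network:attach_external_network checks above are ordinary oslo.policy enforcement: these tempest credentials carry only the member/reader roles, so the admin-gated rule denies and port creation proceeds without external networks. A minimal sketch of the same check with the public oslo.policy API; that this rule family defaults to admin-only is stated here as an assumption, not read from the log:

    # Sketch of the policy check logged by nova.policy.authorize: register
    # an admin-only default (assumed), then enforce against member/reader
    # credentials like those in the log -- the check returns False.
    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("context_is_admin", "role:admin"))
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network",
                           "rule:context_is_admin"))

    creds = {"roles": ["member", "reader"], "is_admin": False,
             "project_id": "6b508b5886b148c0a8c913a053d839bb"}
    print(enforcer.enforce("network:attach_external_network", {}, creds))
    # -> False, matching the "Policy check ... failed" lines above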
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 563.540262] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d336e81d-5d58-4ecd-bf26-21ac2e0e6ec4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.548837] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcc1cd1-77ef-4a57-a45f-e3bdc504d37e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.585662] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92117c8a-bd13-4bba-a36b-248cf24620b5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.596124] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9479e2-2e87-41af-8b98-736c3902b82b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.615659] env[62507]: DEBUG nova.compute.provider_tree [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.628303] env[62507]: DEBUG nova.scheduler.client.report [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 563.650733] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.390s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.651279] env[62507]: DEBUG nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Start building networks asynchronously for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 563.699223] env[62507]: DEBUG nova.compute.utils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 563.704102] env[62507]: DEBUG nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 563.704102] env[62507]: DEBUG nova.network.neutron [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 563.713611] env[62507]: DEBUG nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 563.819101] env[62507]: DEBUG nova.network.neutron [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Updating instance_info_cache with network_info: [{"id": "00b16c90-c0c2-4c93-8f29-9b375934f297", "address": "fa:16:3e:d0:f0:5e", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00b16c90-c0", "ovs_interfaceid": "00b16c90-c0c2-4c93-8f29-9b375934f297", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.833276] env[62507]: DEBUG nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 563.840424] env[62507]: DEBUG nova.policy [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18173c45dcd54b55b92529114be6f9de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e1dab9fe53543c98d9e58c703c2527a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 563.843095] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Releasing lock "refresh_cache-58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.843095] env[62507]: DEBUG nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Instance network_info: |[{"id": "00b16c90-c0c2-4c93-8f29-9b375934f297", "address": "fa:16:3e:d0:f0:5e", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00b16c90-c0", "ovs_interfaceid": "00b16c90-c0c2-4c93-8f29-9b375934f297", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 563.843722] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:f0:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00b16c90-c0c2-4c93-8f29-9b375934f297', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 563.852033] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] 
Creating folder: Project (6b508b5886b148c0a8c913a053d839bb). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 563.852714] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0e3bce9-fd5b-491f-ab1a-242bc397186b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.863474] env[62507]: DEBUG nova.virt.hardware [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 563.863708] env[62507]: DEBUG nova.virt.hardware [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 563.863872] env[62507]: DEBUG nova.virt.hardware [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 563.864084] env[62507]: DEBUG nova.virt.hardware [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 563.864234] env[62507]: DEBUG nova.virt.hardware [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 563.864390] env[62507]: DEBUG nova.virt.hardware [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 563.864596] env[62507]: DEBUG nova.virt.hardware [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 563.865016] env[62507]: DEBUG 
nova.virt.hardware [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 563.865016] env[62507]: DEBUG nova.virt.hardware [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 563.865185] env[62507]: DEBUG nova.virt.hardware [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 563.865345] env[62507]: DEBUG nova.virt.hardware [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 563.866635] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ce7c6e-3c6c-4eea-972e-5041e57e41a0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.870641] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Created folder: Project (6b508b5886b148c0a8c913a053d839bb) in parent group-v497991. [ 563.870817] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Creating folder: Instances. Parent ref: group-v498013. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 563.871588] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b459ccf6-9ee7-4ed4-bc02-95833676bde0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.877394] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0d694a-ac15-4a6a-be49-a6ce4e1e5c2b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.884910] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Created folder: Instances in parent group-v498013. [ 563.885158] env[62507]: DEBUG oslo.service.loopingcall [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 563.893485] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 563.895178] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a46fb6f5-46a8-499b-a6b9-a6acda09a9f6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.911559] env[62507]: DEBUG nova.compute.manager [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Received event network-changed-97f73950-75bd-463b-ac09-955217bff17c {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 563.911753] env[62507]: DEBUG nova.compute.manager [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Refreshing instance network info cache due to event network-changed-97f73950-75bd-463b-ac09-955217bff17c. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 563.912078] env[62507]: DEBUG oslo_concurrency.lockutils [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] Acquiring lock "refresh_cache-c31b20a2-11aa-4d64-a8c6-2d8f889f1560" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.912241] env[62507]: DEBUG oslo_concurrency.lockutils [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] Acquired lock "refresh_cache-c31b20a2-11aa-4d64-a8c6-2d8f889f1560" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.912438] env[62507]: DEBUG nova.network.neutron [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Refreshing network info cache for port 97f73950-75bd-463b-ac09-955217bff17c {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 563.921296] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 563.921296] env[62507]: value = "task-2459914" [ 563.921296] env[62507]: _type = "Task" [ 563.921296] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.928711] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459914, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.429210] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459914, 'name': CreateVM_Task, 'duration_secs': 0.3276} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.429521] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 564.430014] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.430203] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.430529] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 564.430775] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ffb2bf0-14d6-40d0-9a05-bedb72290a2c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.435598] env[62507]: DEBUG oslo_vmware.api [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Waiting for the task: (returnval){ [ 564.435598] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52ba1388-bd3e-4853-fa1a-9e3ab8544d10" [ 564.435598] env[62507]: _type = "Task" [ 564.435598] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.443462] env[62507]: DEBUG oslo_vmware.api [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52ba1388-bd3e-4853-fa1a-9e3ab8544d10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.776921] env[62507]: DEBUG nova.network.neutron [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Successfully created port: d9714516-b747-464c-876a-7cff1c5655fd {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 564.949517] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.950305] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 564.950305] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.978422] env[62507]: DEBUG nova.network.neutron [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Updated VIF entry in instance network info cache for port 97f73950-75bd-463b-ac09-955217bff17c. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 564.978966] env[62507]: DEBUG nova.network.neutron [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Updating instance_info_cache with network_info: [{"id": "97f73950-75bd-463b-ac09-955217bff17c", "address": "fa:16:3e:e2:56:c7", "network": {"id": "93658065-34b2-473a-a362-753348b13359", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1799724645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "791470ac011b45f9a4e294deedc3bb02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d47d5e1d-e66d-4f2c-83e6-d5e78c2b767d", "external-id": "nsx-vlan-transportzone-109", "segmentation_id": 109, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97f73950-75", "ovs_interfaceid": "97f73950-75bd-463b-ac09-955217bff17c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.989179] env[62507]: DEBUG oslo_concurrency.lockutils [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] Releasing lock "refresh_cache-c31b20a2-11aa-4d64-a8c6-2d8f889f1560" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.989415] env[62507]: DEBUG nova.compute.manager [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Received event network-vif-plugged-93388ce7-b185-439d-b445-b2d1fc781568 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 564.989616] env[62507]: DEBUG oslo_concurrency.lockutils [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] Acquiring lock "3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.991737] env[62507]: DEBUG oslo_concurrency.lockutils [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] Lock "3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.991737] env[62507]: DEBUG oslo_concurrency.lockutils [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] Lock "3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.991737] env[62507]: DEBUG 
nova.compute.manager [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] No waiting events found dispatching network-vif-plugged-93388ce7-b185-439d-b445-b2d1fc781568 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 564.991737] env[62507]: WARNING nova.compute.manager [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Received unexpected event network-vif-plugged-93388ce7-b185-439d-b445-b2d1fc781568 for instance with vm_state building and task_state spawning. [ 564.992370] env[62507]: DEBUG nova.compute.manager [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Received event network-changed-93388ce7-b185-439d-b445-b2d1fc781568 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 564.992370] env[62507]: DEBUG nova.compute.manager [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Refreshing instance network info cache due to event network-changed-93388ce7-b185-439d-b445-b2d1fc781568. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 564.992370] env[62507]: DEBUG oslo_concurrency.lockutils [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] Acquiring lock "refresh_cache-3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.992370] env[62507]: DEBUG oslo_concurrency.lockutils [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] Acquired lock "refresh_cache-3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.992370] env[62507]: DEBUG nova.network.neutron [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Refreshing network info cache for port 93388ce7-b185-439d-b445-b2d1fc781568 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 565.070402] env[62507]: DEBUG nova.network.neutron [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Successfully created port: 66389934-b120-4a1e-8ce2-2d5fa564d484 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 565.128814] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "1dcce6af-d9f7-4a24-97c7-4b0425c39d68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.129065] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "1dcce6af-d9f7-4a24-97c7-4b0425c39d68" acquired 
by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.491588] env[62507]: DEBUG nova.compute.manager [req-f9753fd5-1e46-4d5b-be48-50dd284f7206 req-97576b06-2947-4f28-a79c-97834567974c service nova] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Received event network-vif-plugged-00b16c90-c0c2-4c93-8f29-9b375934f297 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 565.492191] env[62507]: DEBUG oslo_concurrency.lockutils [req-f9753fd5-1e46-4d5b-be48-50dd284f7206 req-97576b06-2947-4f28-a79c-97834567974c service nova] Acquiring lock "58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.492427] env[62507]: DEBUG oslo_concurrency.lockutils [req-f9753fd5-1e46-4d5b-be48-50dd284f7206 req-97576b06-2947-4f28-a79c-97834567974c service nova] Lock "58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.492616] env[62507]: DEBUG oslo_concurrency.lockutils [req-f9753fd5-1e46-4d5b-be48-50dd284f7206 req-97576b06-2947-4f28-a79c-97834567974c service nova] Lock "58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.492791] env[62507]: DEBUG nova.compute.manager [req-f9753fd5-1e46-4d5b-be48-50dd284f7206 req-97576b06-2947-4f28-a79c-97834567974c service nova] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] No waiting events found dispatching network-vif-plugged-00b16c90-c0c2-4c93-8f29-9b375934f297 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 565.492970] env[62507]: WARNING nova.compute.manager [req-f9753fd5-1e46-4d5b-be48-50dd284f7206 req-97576b06-2947-4f28-a79c-97834567974c service nova] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Received unexpected event network-vif-plugged-00b16c90-c0c2-4c93-8f29-9b375934f297 for instance with vm_state building and task_state spawning. [ 565.608606] env[62507]: DEBUG nova.network.neutron [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Updated VIF entry in instance network info cache for port 93388ce7-b185-439d-b445-b2d1fc781568. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 565.609021] env[62507]: DEBUG nova.network.neutron [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Updating instance_info_cache with network_info: [{"id": "93388ce7-b185-439d-b445-b2d1fc781568", "address": "fa:16:3e:71:ee:08", "network": {"id": "29811ada-e03a-45fc-a799-22916e87c373", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1736393850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6e0a6e2f8db448ccb09402b54dccda3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93388ce7-b1", "ovs_interfaceid": "93388ce7-b185-439d-b445-b2d1fc781568", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.628212] env[62507]: DEBUG oslo_concurrency.lockutils [req-f623e4e4-2ae3-43a5-9829-6fb58d99af3c req-07078de6-fb1f-4308-95c5-779946e792dc service nova] Releasing lock "refresh_cache-3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.508454] env[62507]: DEBUG nova.network.neutron [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Successfully updated port: 66389934-b120-4a1e-8ce2-2d5fa564d484 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 566.530929] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Acquiring lock "refresh_cache-63f63029-d01a-4d55-9753-95b93b7155cf" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.530929] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Acquired lock "refresh_cache-63f63029-d01a-4d55-9753-95b93b7155cf" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.531100] env[62507]: DEBUG nova.network.neutron [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 566.614663] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 
tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.615142] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.635941] env[62507]: DEBUG nova.network.neutron [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 567.179212] env[62507]: DEBUG nova.network.neutron [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Updating instance_info_cache with network_info: [{"id": "66389934-b120-4a1e-8ce2-2d5fa564d484", "address": "fa:16:3e:ac:51:1b", "network": {"id": "3dc56b64-c981-4ed5-81d1-980e6bde90fa", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-388479828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e1dab9fe53543c98d9e58c703c2527a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66389934-b1", "ovs_interfaceid": "66389934-b120-4a1e-8ce2-2d5fa564d484", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.194197] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Releasing lock "refresh_cache-63f63029-d01a-4d55-9753-95b93b7155cf" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.194504] env[62507]: DEBUG nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Instance network_info: |[{"id": "66389934-b120-4a1e-8ce2-2d5fa564d484", "address": "fa:16:3e:ac:51:1b", "network": {"id": 
"3dc56b64-c981-4ed5-81d1-980e6bde90fa", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-388479828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e1dab9fe53543c98d9e58c703c2527a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66389934-b1", "ovs_interfaceid": "66389934-b120-4a1e-8ce2-2d5fa564d484", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 567.194900] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:51:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66389934-b120-4a1e-8ce2-2d5fa564d484', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 567.203216] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Creating folder: Project (2e1dab9fe53543c98d9e58c703c2527a). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 567.204089] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a395fa5-0a19-4ef7-a5ec-f1ca9ae18c8b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.214367] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Created folder: Project (2e1dab9fe53543c98d9e58c703c2527a) in parent group-v497991. [ 567.214539] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Creating folder: Instances. Parent ref: group-v498016. 
{{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 567.214768] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a15b77d3-3747-4717-b044-7d14d95f7a5c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.223842] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Created folder: Instances in parent group-v498016. [ 567.223842] env[62507]: DEBUG oslo.service.loopingcall [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 567.223983] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 567.224918] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e481abb2-2a38-44fd-8b48-dada76fecb6b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.247209] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 567.247209] env[62507]: value = "task-2459917" [ 567.247209] env[62507]: _type = "Task" [ 567.247209] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.260395] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459917, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.756838] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459917, 'name': CreateVM_Task, 'duration_secs': 0.341083} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.757160] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 567.759031] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.759031] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.759031] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 567.759031] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70d5dc4e-3a5b-434e-90bb-daa74be63253 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.764030] env[62507]: DEBUG oslo_vmware.api [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Waiting for the task: (returnval){ [ 567.764030] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529cfd39-146c-ecba-6c4c-c29a9e5f99db" [ 567.764030] env[62507]: _type = "Task" [ 567.764030] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.773986] env[62507]: DEBUG oslo_vmware.api [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529cfd39-146c-ecba-6c4c-c29a9e5f99db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.893633] env[62507]: DEBUG nova.network.neutron [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Successfully updated port: d9714516-b747-464c-876a-7cff1c5655fd {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 567.905450] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Acquiring lock "refresh_cache-dc241495-c9b7-4f2f-895d-e25008cc738a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.905450] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Acquired lock "refresh_cache-dc241495-c9b7-4f2f-895d-e25008cc738a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.905450] env[62507]: DEBUG nova.network.neutron [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 568.010705] env[62507]: DEBUG nova.network.neutron [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 568.281276] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.281491] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 568.281712] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.514628] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "498b6bd7-03d8-44e7-b007-27d86afcb028" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.515074] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "498b6bd7-03d8-44e7-b007-27d86afcb028" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.713226] env[62507]: DEBUG nova.network.neutron [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Updating instance_info_cache with network_info: [{"id": "d9714516-b747-464c-876a-7cff1c5655fd", "address": "fa:16:3e:dd:fb:5e", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.201", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9714516-b7", "ovs_interfaceid": "d9714516-b747-464c-876a-7cff1c5655fd", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.739060] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Releasing lock "refresh_cache-dc241495-c9b7-4f2f-895d-e25008cc738a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.739462] env[62507]: DEBUG nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Instance network_info: |[{"id": "d9714516-b747-464c-876a-7cff1c5655fd", "address": "fa:16:3e:dd:fb:5e", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.201", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9714516-b7", "ovs_interfaceid": "d9714516-b747-464c-876a-7cff1c5655fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 568.739726] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:fb:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9714516-b747-464c-876a-7cff1c5655fd', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 568.755734] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Creating folder: Project (ba54372059444fc4ae83dc3df0c8023b). Parent ref: group-v497991. 
{{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 568.756490] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81f5f2e9-47de-4adf-9ac0-1b2df867a437 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.769414] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Created folder: Project (ba54372059444fc4ae83dc3df0c8023b) in parent group-v497991. [ 568.769414] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Creating folder: Instances. Parent ref: group-v498019. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 568.769414] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc9f1449-f17e-485f-aedd-00da17b7de09 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.786506] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Created folder: Instances in parent group-v498019. [ 568.789243] env[62507]: DEBUG oslo.service.loopingcall [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.790199] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 568.790199] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e496f3c2-5f94-490d-b8c0-b642ab05a8c6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.816775] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 568.816775] env[62507]: value = "task-2459920" [ 568.816775] env[62507]: _type = "Task" [ 568.816775] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.825511] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459920, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.330950] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459920, 'name': CreateVM_Task, 'duration_secs': 0.417146} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.330950] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 569.330950] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.330950] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.331521] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 569.331637] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ca0fbd3-6ab3-4a4d-98bb-4b22893247cc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.337023] env[62507]: DEBUG oslo_vmware.api [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Waiting for the task: (returnval){ [ 569.337023] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5216060b-d559-3458-f6a3-ac8f7ea32e89" [ 569.337023] env[62507]: _type = "Task" [ 569.337023] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.346681] env[62507]: DEBUG oslo_vmware.api [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5216060b-d559-3458-f6a3-ac8f7ea32e89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.732292] env[62507]: DEBUG nova.compute.manager [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Received event network-changed-00b16c90-c0c2-4c93-8f29-9b375934f297 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 569.732497] env[62507]: DEBUG nova.compute.manager [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Refreshing instance network info cache due to event network-changed-00b16c90-c0c2-4c93-8f29-9b375934f297. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 569.732759] env[62507]: DEBUG oslo_concurrency.lockutils [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] Acquiring lock "refresh_cache-58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.732941] env[62507]: DEBUG oslo_concurrency.lockutils [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] Acquired lock "refresh_cache-58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.733201] env[62507]: DEBUG nova.network.neutron [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Refreshing network info cache for port 00b16c90-c0c2-4c93-8f29-9b375934f297 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 569.752769] env[62507]: DEBUG nova.compute.manager [req-d466e25d-83e2-41ec-9815-9943a981422f req-b718fc7b-faaf-4b80-95de-0fef798ced20 service nova] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Received event network-vif-plugged-d9714516-b747-464c-876a-7cff1c5655fd {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 569.752769] env[62507]: DEBUG oslo_concurrency.lockutils [req-d466e25d-83e2-41ec-9815-9943a981422f req-b718fc7b-faaf-4b80-95de-0fef798ced20 service nova] Acquiring lock "dc241495-c9b7-4f2f-895d-e25008cc738a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.752769] env[62507]: DEBUG oslo_concurrency.lockutils [req-d466e25d-83e2-41ec-9815-9943a981422f req-b718fc7b-faaf-4b80-95de-0fef798ced20 service nova] Lock "dc241495-c9b7-4f2f-895d-e25008cc738a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.752769] env[62507]: DEBUG oslo_concurrency.lockutils [req-d466e25d-83e2-41ec-9815-9943a981422f req-b718fc7b-faaf-4b80-95de-0fef798ced20 service nova] Lock "dc241495-c9b7-4f2f-895d-e25008cc738a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.752947] env[62507]: DEBUG nova.compute.manager [req-d466e25d-83e2-41ec-9815-9943a981422f req-b718fc7b-faaf-4b80-95de-0fef798ced20 service nova] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] No waiting events found dispatching network-vif-plugged-d9714516-b747-464c-876a-7cff1c5655fd {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 569.752947] env[62507]: WARNING nova.compute.manager [req-d466e25d-83e2-41ec-9815-9943a981422f req-b718fc7b-faaf-4b80-95de-0fef798ced20 service nova] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Received unexpected event network-vif-plugged-d9714516-b747-464c-876a-7cff1c5655fd for instance with vm_state building and task_state spawning. 
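(The CreateVM_Task exchange above follows the usual oslo.vmware pattern: a vCenter call returns a Task moref, and the caller polls it until it reaches a terminal state -- the "progress is 0%" lines are those polls. Below is a minimal sketch of that loop; get_task_info is an assumed stand-in for the PropertyCollector read that oslo.vmware performs internally, and the names are illustrative, not the library's API.)

    import time

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        """Poll a vCenter task until it reaches a terminal state.

        get_task_info is an assumed helper returning an object with
        .state, .progress and .error attributes; it stands in for the
        PropertyCollector query that oslo.vmware's _poll_task issues
        (api.py:434 in the log lines above).
        """
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                return info  # e.g. CreateVM_Task above, done in ~0.42s
            if info.state == "error":
                # The real library translates the fault into a
                # VimFaultException, as happens later in this log when
                # CopyVirtualDisk_Task fails with InvalidArgument.
                raise RuntimeError(info.error)
            time.sleep(interval)  # not terminal yet: "progress is 0%"

(On success the SearchDatastore_Task and folder-creation records that follow continue the same invoke-then-poll rhythm.)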
[ 569.850056] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.850585] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 569.850837] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.223126] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Acquiring lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.223838] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.026095] env[62507]: DEBUG nova.network.neutron [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Updated VIF entry in instance network info cache for port 00b16c90-c0c2-4c93-8f29-9b375934f297. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 571.026484] env[62507]: DEBUG nova.network.neutron [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Updating instance_info_cache with network_info: [{"id": "00b16c90-c0c2-4c93-8f29-9b375934f297", "address": "fa:16:3e:d0:f0:5e", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00b16c90-c0", "ovs_interfaceid": "00b16c90-c0c2-4c93-8f29-9b375934f297", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.039920] env[62507]: DEBUG oslo_concurrency.lockutils [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] Releasing lock "refresh_cache-58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.040201] env[62507]: DEBUG nova.compute.manager [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Received event network-vif-plugged-66389934-b120-4a1e-8ce2-2d5fa564d484 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 571.040424] env[62507]: DEBUG oslo_concurrency.lockutils [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] Acquiring lock "63f63029-d01a-4d55-9753-95b93b7155cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.040636] env[62507]: DEBUG oslo_concurrency.lockutils [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] Lock "63f63029-d01a-4d55-9753-95b93b7155cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.040799] env[62507]: DEBUG oslo_concurrency.lockutils [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] Lock "63f63029-d01a-4d55-9753-95b93b7155cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.040965] env[62507]: DEBUG nova.compute.manager 
[req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] No waiting events found dispatching network-vif-plugged-66389934-b120-4a1e-8ce2-2d5fa564d484 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 571.041151] env[62507]: WARNING nova.compute.manager [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Received unexpected event network-vif-plugged-66389934-b120-4a1e-8ce2-2d5fa564d484 for instance with vm_state building and task_state spawning. [ 571.041334] env[62507]: DEBUG nova.compute.manager [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Received event network-changed-66389934-b120-4a1e-8ce2-2d5fa564d484 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 571.041542] env[62507]: DEBUG nova.compute.manager [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Refreshing instance network info cache due to event network-changed-66389934-b120-4a1e-8ce2-2d5fa564d484. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 571.041750] env[62507]: DEBUG oslo_concurrency.lockutils [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] Acquiring lock "refresh_cache-63f63029-d01a-4d55-9753-95b93b7155cf" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.041892] env[62507]: DEBUG oslo_concurrency.lockutils [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] Acquired lock "refresh_cache-63f63029-d01a-4d55-9753-95b93b7155cf" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.043240] env[62507]: DEBUG nova.network.neutron [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Refreshing network info cache for port 66389934-b120-4a1e-8ce2-2d5fa564d484 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 571.702600] env[62507]: DEBUG nova.network.neutron [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Updated VIF entry in instance network info cache for port 66389934-b120-4a1e-8ce2-2d5fa564d484. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 571.706945] env[62507]: DEBUG nova.network.neutron [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Updating instance_info_cache with network_info: [{"id": "66389934-b120-4a1e-8ce2-2d5fa564d484", "address": "fa:16:3e:ac:51:1b", "network": {"id": "3dc56b64-c981-4ed5-81d1-980e6bde90fa", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-388479828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e1dab9fe53543c98d9e58c703c2527a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66389934-b1", "ovs_interfaceid": "66389934-b120-4a1e-8ce2-2d5fa564d484", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.723226] env[62507]: DEBUG oslo_concurrency.lockutils [req-85baad0f-c888-4e0d-a381-8a094647deee req-f12da00e-4d31-4e8f-8216-b64e462b1b22 service nova] Releasing lock "refresh_cache-63f63029-d01a-4d55-9753-95b93b7155cf" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.335636] env[62507]: DEBUG nova.compute.manager [req-666fafdf-ca99-4d9f-a1c1-ef4bb4774c4f req-2e9b1543-b029-4ac2-9657-3422993cb424 service nova] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Received event network-changed-d9714516-b747-464c-876a-7cff1c5655fd {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 573.335894] env[62507]: DEBUG nova.compute.manager [req-666fafdf-ca99-4d9f-a1c1-ef4bb4774c4f req-2e9b1543-b029-4ac2-9657-3422993cb424 service nova] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Refreshing instance network info cache due to event network-changed-d9714516-b747-464c-876a-7cff1c5655fd. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 573.336145] env[62507]: DEBUG oslo_concurrency.lockutils [req-666fafdf-ca99-4d9f-a1c1-ef4bb4774c4f req-2e9b1543-b029-4ac2-9657-3422993cb424 service nova] Acquiring lock "refresh_cache-dc241495-c9b7-4f2f-895d-e25008cc738a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.336236] env[62507]: DEBUG oslo_concurrency.lockutils [req-666fafdf-ca99-4d9f-a1c1-ef4bb4774c4f req-2e9b1543-b029-4ac2-9657-3422993cb424 service nova] Acquired lock "refresh_cache-dc241495-c9b7-4f2f-895d-e25008cc738a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.336393] env[62507]: DEBUG nova.network.neutron [req-666fafdf-ca99-4d9f-a1c1-ef4bb4774c4f req-2e9b1543-b029-4ac2-9657-3422993cb424 service nova] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Refreshing network info cache for port d9714516-b747-464c-876a-7cff1c5655fd {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 574.642396] env[62507]: DEBUG nova.network.neutron [req-666fafdf-ca99-4d9f-a1c1-ef4bb4774c4f req-2e9b1543-b029-4ac2-9657-3422993cb424 service nova] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Updated VIF entry in instance network info cache for port d9714516-b747-464c-876a-7cff1c5655fd. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 574.642793] env[62507]: DEBUG nova.network.neutron [req-666fafdf-ca99-4d9f-a1c1-ef4bb4774c4f req-2e9b1543-b029-4ac2-9657-3422993cb424 service nova] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Updating instance_info_cache with network_info: [{"id": "d9714516-b747-464c-876a-7cff1c5655fd", "address": "fa:16:3e:dd:fb:5e", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.201", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9714516-b7", "ovs_interfaceid": "d9714516-b747-464c-876a-7cff1c5655fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.661875] env[62507]: DEBUG oslo_concurrency.lockutils [req-666fafdf-ca99-4d9f-a1c1-ef4bb4774c4f req-2e9b1543-b029-4ac2-9657-3422993cb424 service nova] Releasing lock "refresh_cache-dc241495-c9b7-4f2f-895d-e25008cc738a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.993167] env[62507]: DEBUG oslo_concurrency.lockutils [None req-29b02f1d-c78a-4414-afbf-8114700422ba tempest-AttachInterfacesV270Test-1979904879 tempest-AttachInterfacesV270Test-1979904879-project-member] Acquiring lock 
"c5a731c8-f9a7-4a4f-a69a-a429d99b80fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.994025] env[62507]: DEBUG oslo_concurrency.lockutils [None req-29b02f1d-c78a-4414-afbf-8114700422ba tempest-AttachInterfacesV270Test-1979904879 tempest-AttachInterfacesV270Test-1979904879-project-member] Lock "c5a731c8-f9a7-4a4f-a69a-a429d99b80fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.301258] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c627e6a-fc2b-4ca9-96f8-a542a24da03e tempest-ServersAdminNegativeTestJSON-2078858398 tempest-ServersAdminNegativeTestJSON-2078858398-project-member] Acquiring lock "6316a65c-b75f-4432-9bd9-5e9833c18af3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.301811] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c627e6a-fc2b-4ca9-96f8-a542a24da03e tempest-ServersAdminNegativeTestJSON-2078858398 tempest-ServersAdminNegativeTestJSON-2078858398-project-member] Lock "6316a65c-b75f-4432-9bd9-5e9833c18af3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.945812] env[62507]: DEBUG oslo_concurrency.lockutils [None req-915c712c-0ccb-439f-9928-04fb85011147 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "6122b5aa-dde8-4999-b611-6695d01545c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.945812] env[62507]: DEBUG oslo_concurrency.lockutils [None req-915c712c-0ccb-439f-9928-04fb85011147 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "6122b5aa-dde8-4999-b611-6695d01545c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.219130] env[62507]: DEBUG oslo_concurrency.lockutils [None req-eb37c927-c963-4341-b648-ebd6e9eed15d tempest-ServerShowV247Test-1066966410 tempest-ServerShowV247Test-1066966410-project-member] Acquiring lock "b8a89a8f-9a7d-4d95-bf9b-7b1c6dea5600" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.219394] env[62507]: DEBUG oslo_concurrency.lockutils [None req-eb37c927-c963-4341-b648-ebd6e9eed15d tempest-ServerShowV247Test-1066966410 tempest-ServerShowV247Test-1066966410-project-member] Lock "b8a89a8f-9a7d-4d95-bf9b-7b1c6dea5600" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.939345] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7a578e35-1ea4-4aa5-a862-82b7099b1776 tempest-VolumesAssistedSnapshotsTest-1563220377 tempest-VolumesAssistedSnapshotsTest-1563220377-project-member] Acquiring lock "cb9d87f8-b8c4-4dbf-93ed-dab6988fae48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.939345] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7a578e35-1ea4-4aa5-a862-82b7099b1776 tempest-VolumesAssistedSnapshotsTest-1563220377 tempest-VolumesAssistedSnapshotsTest-1563220377-project-member] Lock "cb9d87f8-b8c4-4dbf-93ed-dab6988fae48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.217135] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a9d801b9-e687-4dfa-aaa5-389d85004d93 tempest-ServersWithSpecificFlavorTestJSON-413214265 tempest-ServersWithSpecificFlavorTestJSON-413214265-project-member] Acquiring lock "765af4c8-ddae-4d08-a49f-217e038e5555" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.217446] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a9d801b9-e687-4dfa-aaa5-389d85004d93 tempest-ServersWithSpecificFlavorTestJSON-413214265 tempest-ServersWithSpecificFlavorTestJSON-413214265-project-member] Lock "765af4c8-ddae-4d08-a49f-217e038e5555" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.431593] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dc2a5387-743c-4241-9cf1-98b629bb01ed tempest-ServerShowV247Test-1066966410 tempest-ServerShowV247Test-1066966410-project-member] Acquiring lock "01bfc053-50f3-4813-8e4b-aceba0d2440f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.431923] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dc2a5387-743c-4241-9cf1-98b629bb01ed tempest-ServerShowV247Test-1066966410 tempest-ServerShowV247Test-1066966410-project-member] Lock "01bfc053-50f3-4813-8e4b-aceba0d2440f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.937285] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4848aef1-1e45-44a2-b6e4-07e2dad477fe tempest-ServersAdmin275Test-176356115 tempest-ServersAdmin275Test-176356115-project-member] Acquiring lock "fbfd64cf-fa8e-48c0-9410-e3da6080f163" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.940124] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4848aef1-1e45-44a2-b6e4-07e2dad477fe 
tempest-ServersAdmin275Test-176356115 tempest-ServersAdmin275Test-176356115-project-member] Lock "fbfd64cf-fa8e-48c0-9410-e3da6080f163" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.428963] env[62507]: WARNING oslo_vmware.rw_handles [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 585.428963] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 585.428963] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 585.428963] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 585.428963] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 585.428963] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 585.428963] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 585.428963] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 585.428963] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 585.428963] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 585.428963] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 585.428963] env[62507]: ERROR oslo_vmware.rw_handles [ 585.429637] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/365d0e8e-d5b6-4efd-9053-1202c86978ed/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 585.430961] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 585.431225] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Copying Virtual Disk [datastore2] vmware_temp/365d0e8e-d5b6-4efd-9053-1202c86978ed/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/365d0e8e-d5b6-4efd-9053-1202c86978ed/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 585.431524] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3228bcd8-7d49-4562-8446-8b77f6d2c917 {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.441297] env[62507]: DEBUG oslo_vmware.api [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Waiting for the task: (returnval){ [ 585.441297] env[62507]: value = "task-2459925" [ 585.441297] env[62507]: _type = "Task" [ 585.441297] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.453941] env[62507]: DEBUG oslo_vmware.api [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Task: {'id': task-2459925, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.950872] env[62507]: DEBUG oslo_vmware.exceptions [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 585.951088] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.954232] env[62507]: ERROR nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 585.954232] env[62507]: Faults: ['InvalidArgument'] [ 585.954232] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Traceback (most recent call last): [ 585.954232] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 585.954232] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] yield resources [ 585.954232] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 585.954232] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] self.driver.spawn(context, instance, image_meta, [ 585.954232] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 585.954232] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 585.954232] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 585.954232] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] 
self._fetch_image_if_missing(context, vi) [ 585.954232] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 585.956759] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] image_cache(vi, tmp_image_ds_loc) [ 585.956759] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 585.956759] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] vm_util.copy_virtual_disk( [ 585.956759] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 585.956759] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] session._wait_for_task(vmdk_copy_task) [ 585.956759] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 585.956759] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] return self.wait_for_task(task_ref) [ 585.956759] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 585.956759] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] return evt.wait() [ 585.956759] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 585.956759] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] result = hub.switch() [ 585.956759] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 585.956759] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] return self.greenlet.switch() [ 585.957237] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 585.957237] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] self.f(*self.args, **self.kw) [ 585.957237] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 585.957237] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] raise exceptions.translate_fault(task_info.error) [ 585.957237] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 585.957237] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Faults: ['InvalidArgument'] [ 585.957237] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] [ 585.957237] env[62507]: INFO nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 
tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Terminating instance [ 585.957237] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.957549] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 585.957549] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-435e9bc5-b2aa-40a2-9d3d-2563627b8dba {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.959944] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Acquiring lock "refresh_cache-55592fa8-93f7-49a2-8022-f4d0825c705b" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.960119] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Acquired lock "refresh_cache-55592fa8-93f7-49a2-8022-f4d0825c705b" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.960287] env[62507]: DEBUG nova.network.neutron [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 585.967663] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 585.967663] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 585.968378] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3146cf3f-7fe5-4949-a0aa-73efb96835a5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.974665] env[62507]: DEBUG oslo_vmware.api [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Waiting for the task: (returnval){ [ 585.974665] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529ded2a-959c-d745-da03-899733c54f4d" [ 585.974665] env[62507]: _type = "Task" [ 585.974665] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.983567] env[62507]: DEBUG oslo_vmware.api [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529ded2a-959c-d745-da03-899733c54f4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.002956] env[62507]: DEBUG nova.network.neutron [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.171119] env[62507]: DEBUG nova.network.neutron [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.181164] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Releasing lock "refresh_cache-55592fa8-93f7-49a2-8022-f4d0825c705b" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.181164] env[62507]: DEBUG nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Start destroying the instance on the hypervisor. 
{{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 586.181164] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 586.181164] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22291e60-b297-4913-8bb0-971fdd7af519 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.190990] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 586.190990] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5028726-3b15-42a7-89b2-3b2620191a7c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.224703] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 586.225095] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 586.227019] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Deleting the datastore file [datastore2] 55592fa8-93f7-49a2-8022-f4d0825c705b {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 586.227019] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6db9c8d-08c5-4af5-8f21-1f8dd831f4dc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.235021] env[62507]: DEBUG oslo_vmware.api [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Waiting for the task: (returnval){ [ 586.235021] env[62507]: value = "task-2459927" [ 586.235021] env[62507]: _type = "Task" [ 586.235021] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.241772] env[62507]: DEBUG oslo_vmware.api [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Task: {'id': task-2459927, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.490714] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 586.493639] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Creating directory with path [datastore2] vmware_temp/f79fb68d-bc80-4c6b-b25b-ad25d36e81ab/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 586.493639] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e53b842-9539-42b5-83f8-ef0ce42558cd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.507018] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Created directory with path [datastore2] vmware_temp/f79fb68d-bc80-4c6b-b25b-ad25d36e81ab/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 586.507018] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Fetch image to [datastore2] vmware_temp/f79fb68d-bc80-4c6b-b25b-ad25d36e81ab/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 586.507018] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/f79fb68d-bc80-4c6b-b25b-ad25d36e81ab/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 586.507018] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47ca940-a8fe-4b7b-93ff-6e1ae819a14d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.515061] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd6d7b6-7d22-4ff9-9da6-8399abb692cb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.524613] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9bf10f0-73d7-4f20-a439-a58f80b67f42 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
586.562578] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc880026-6bbc-4e41-8393-c96798e3593d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.569836] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d7c4716b-0cdf-4127-8980-16bafeaf383b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.657608] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 586.730373] env[62507]: DEBUG oslo_vmware.rw_handles [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f79fb68d-bc80-4c6b-b25b-ad25d36e81ab/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 586.806096] env[62507]: DEBUG oslo_vmware.api [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Task: {'id': task-2459927, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.044607} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.807475] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 586.807986] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 586.807986] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 586.808312] env[62507]: INFO nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Took 0.63 seconds to destroy the instance on the hypervisor. 
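(The fetch sequence above -- SessionManager.AcquireGenericServiceTicket followed by "Creating HTTP connection to write to file with size = 21318656" -- streams the image bytes straight to the datastore's /folder/ URL over HTTPS. A rough sketch of that transfer under stated assumptions: the ticket string and a bytes iterator are already in hand, and the cookie name is assumed for illustration rather than taken from oslo.vmware's actual interface.)

    import http.client
    import ssl
    import urllib.parse

    def upload_to_datastore(url, ticket, chunks, size):
        """Stream image bytes to an ESX datastore /folder/ URL.

        One HTTP PUT against .../folder/<path>?dcPath=...&dsName=...,
        authenticated with a generic service ticket. `chunks` is any
        iterator of bytes (the "image iterator" in the log); the
        cookie name below is an assumption for illustration.
        """
        parts = urllib.parse.urlparse(url)
        conn = http.client.HTTPSConnection(
            parts.netloc, context=ssl.create_default_context())
        conn.putrequest("PUT", parts.path + "?" + parts.query)
        conn.putheader("Cookie", "vmware_cgi_ticket=" + ticket)  # assumed name
        conn.putheader("Content-Length", str(size))  # 21318656 in the log
        conn.endheaders()
        for chunk in chunks:
            conn.send(chunk)
        resp = conn.getresponse()  # the earlier RemoteDisconnected warning
        resp.read()                # in this log was raised at this step,
        conn.close()               # inside rw_handles.close()
        return resp.status

(The "Completed reading data from the image iterator" / "Closing write handle" records just below correspond to the end of that chunk loop and the final getresponse/close.)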
[ 586.808581] env[62507]: DEBUG oslo.service.loopingcall [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.808995] env[62507]: DEBUG oslo_vmware.rw_handles [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 586.809234] env[62507]: DEBUG oslo_vmware.rw_handles [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f79fb68d-bc80-4c6b-b25b-ad25d36e81ab/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 586.809480] env[62507]: DEBUG nova.compute.manager [-] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Skipping network deallocation for instance since networking was not requested. {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 586.812037] env[62507]: DEBUG nova.compute.claims [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 586.814156] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.814156] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.059492] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Acquiring lock "d86cd3ea-23d1-488e-acc6-bb4b4b666247" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.060177] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Lock "d86cd3ea-23d1-488e-acc6-bb4b4b666247" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.231872] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81b476d-44ab-400d-bc0b-8a1178ad321e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.239492] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d94fe8-9673-4855-9071-6519561fb3b1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.270705] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f092cc81-0294-4fdd-a10e-96a5f0bbc683 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.278064] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a886c2ae-2673-4ae8-aeb6-1c3269457b4c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.291515] env[62507]: DEBUG nova.compute.provider_tree [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.308336] env[62507]: DEBUG nova.scheduler.client.report [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 587.331476] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.519s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.332105] env[62507]: ERROR nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 587.332105] env[62507]: Faults: ['InvalidArgument'] [ 587.332105] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Traceback (most recent call last): [ 587.332105] env[62507]: ERROR nova.compute.manager [instance: 
55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 587.332105] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] self.driver.spawn(context, instance, image_meta, [ 587.332105] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 587.332105] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 587.332105] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 587.332105] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] self._fetch_image_if_missing(context, vi) [ 587.332105] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 587.332105] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] image_cache(vi, tmp_image_ds_loc) [ 587.332105] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 587.332898] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] vm_util.copy_virtual_disk( [ 587.332898] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 587.332898] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] session._wait_for_task(vmdk_copy_task) [ 587.332898] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 587.332898] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] return self.wait_for_task(task_ref) [ 587.332898] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 587.332898] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] return evt.wait() [ 587.332898] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 587.332898] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] result = hub.switch() [ 587.332898] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 587.332898] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] return self.greenlet.switch() [ 587.332898] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 587.332898] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] self.f(*self.args, 
**self.kw) [ 587.333601] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 587.333601] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] raise exceptions.translate_fault(task_info.error) [ 587.333601] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 587.333601] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Faults: ['InvalidArgument'] [ 587.333601] env[62507]: ERROR nova.compute.manager [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] [ 587.333601] env[62507]: DEBUG nova.compute.utils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 587.343126] env[62507]: DEBUG nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Build of instance 55592fa8-93f7-49a2-8022-f4d0825c705b was re-scheduled: A specified parameter was not correct: fileType [ 587.343126] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 587.343126] env[62507]: DEBUG nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 587.343126] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Acquiring lock "refresh_cache-55592fa8-93f7-49a2-8022-f4d0825c705b" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.343126] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Acquired lock "refresh_cache-55592fa8-93f7-49a2-8022-f4d0825c705b" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.343611] env[62507]: DEBUG nova.network.neutron [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 587.384732] env[62507]: DEBUG nova.network.neutron [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 587.582331] env[62507]: DEBUG nova.network.neutron [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.594405] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Releasing lock "refresh_cache-55592fa8-93f7-49a2-8022-f4d0825c705b" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.594644] env[62507]: DEBUG nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 587.594985] env[62507]: DEBUG nova.compute.manager [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] [instance: 55592fa8-93f7-49a2-8022-f4d0825c705b] Skipping network deallocation for instance since networking was not requested. {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 587.726729] env[62507]: INFO nova.scheduler.client.report [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Deleted allocations for instance 55592fa8-93f7-49a2-8022-f4d0825c705b [ 587.755223] env[62507]: DEBUG oslo_concurrency.lockutils [None req-21a34d8e-66bd-4e5d-81be-a8e787ae28a0 tempest-ServerDiagnosticsV248Test-620583532 tempest-ServerDiagnosticsV248Test-620583532-project-member] Lock "55592fa8-93f7-49a2-8022-f4d0825c705b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 52.714s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.793318] env[62507]: DEBUG nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 587.868461] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.868814] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.873853] env[62507]: INFO nova.compute.claims [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 588.270055] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685b09b8-0bdc-434d-9d15-e28d1d563240 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.278521] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fbad34e-ac75-4994-8d2b-b1607fd96b0d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.308170] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe08ef14-3d45-4981-a41e-ba95f8f5d266 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.315636] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fda247-c7f8-4f7d-9977-b6e413edcdc1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.328794] env[62507]: DEBUG nova.compute.provider_tree [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.337518] env[62507]: DEBUG nova.scheduler.client.report [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 588.352655] env[62507]: DEBUG oslo_concurrency.lockutils [None 
req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.484s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.353180] env[62507]: DEBUG nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 588.388431] env[62507]: DEBUG nova.compute.utils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 588.392683] env[62507]: DEBUG nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 588.392683] env[62507]: DEBUG nova.network.neutron [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 588.399130] env[62507]: DEBUG nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 588.484699] env[62507]: DEBUG nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 588.506778] env[62507]: DEBUG nova.policy [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9356cc6a11f4151b2f47ece3afacfd5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be102f0d924d4932a18e393e92705c8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 588.516285] env[62507]: DEBUG nova.virt.hardware [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 588.516524] env[62507]: DEBUG nova.virt.hardware [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 588.516682] env[62507]: DEBUG nova.virt.hardware [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 588.516863] env[62507]: DEBUG nova.virt.hardware [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 588.517012] env[62507]: DEBUG nova.virt.hardware [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 588.517198] env[62507]: DEBUG nova.virt.hardware [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 588.517406] env[62507]: DEBUG nova.virt.hardware [None 
req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 588.517566] env[62507]: DEBUG nova.virt.hardware [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 588.517731] env[62507]: DEBUG nova.virt.hardware [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 588.517896] env[62507]: DEBUG nova.virt.hardware [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 588.520246] env[62507]: DEBUG nova.virt.hardware [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 588.521722] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4259952c-a8a6-46a4-b771-3941d1fe68ed {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.535364] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e89741e-3cbf-40f1-b104-3432249ddd45 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.403478] env[62507]: DEBUG nova.network.neutron [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Successfully created port: b1b06f49-6fc4-41e9-893c-66416177b761 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 589.885333] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquiring lock "4a3639c7-8795-4702-a729-8239b0d55d51" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.885597] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Lock "4a3639c7-8795-4702-a729-8239b0d55d51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
591.240510] env[62507]: DEBUG nova.network.neutron [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Successfully updated port: b1b06f49-6fc4-41e9-893c-66416177b761 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 591.258892] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "refresh_cache-1dcce6af-d9f7-4a24-97c7-4b0425c39d68" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.259039] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquired lock "refresh_cache-1dcce6af-d9f7-4a24-97c7-4b0425c39d68" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.259192] env[62507]: DEBUG nova.network.neutron [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 591.339872] env[62507]: DEBUG nova.network.neutron [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 592.058782] env[62507]: DEBUG nova.network.neutron [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Updating instance_info_cache with network_info: [{"id": "b1b06f49-6fc4-41e9-893c-66416177b761", "address": "fa:16:3e:84:c2:68", "network": {"id": "cc0e3936-b36b-4cb0-97da-e22f71a38b63", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-439428702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be102f0d924d4932a18e393e92705c8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1b06f49-6f", "ovs_interfaceid": "b1b06f49-6fc4-41e9-893c-66416177b761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.078056] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Releasing lock "refresh_cache-1dcce6af-d9f7-4a24-97c7-4b0425c39d68" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.078354] env[62507]: DEBUG nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Instance network_info: |[{"id": "b1b06f49-6fc4-41e9-893c-66416177b761", "address": "fa:16:3e:84:c2:68", "network": {"id": "cc0e3936-b36b-4cb0-97da-e22f71a38b63", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-439428702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be102f0d924d4932a18e393e92705c8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1b06f49-6f", "ovs_interfaceid": "b1b06f49-6fc4-41e9-893c-66416177b761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 592.078764] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:c2:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60bdba1a-14cf-46b2-9d8b-aeaf4d80c815', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1b06f49-6fc4-41e9-893c-66416177b761', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 592.086473] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Creating folder: Project (be102f0d924d4932a18e393e92705c8f). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 592.087082] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10e23388-cc83-46f5-ab44-bba188f61fd2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.101778] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Created folder: Project (be102f0d924d4932a18e393e92705c8f) in parent group-v497991. [ 592.101985] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Creating folder: Instances. Parent ref: group-v498025. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 592.102227] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2b8d41c-b422-4503-aa45-ef99124e3ad2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.111117] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Created folder: Instances in parent group-v498025. [ 592.111345] env[62507]: DEBUG oslo.service.loopingcall [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 592.112045] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 592.112045] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f887717-ba3d-462c-aebb-7fdc74f810d1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.131344] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 592.131344] env[62507]: value = "task-2459933" [ 592.131344] env[62507]: _type = "Task" [ 592.131344] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.142286] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459933, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.150170] env[62507]: DEBUG nova.compute.manager [req-6dab8cfa-162e-4470-8cb8-8f77b728fb66 req-1c2caa75-5071-4bb5-84b0-0633a30ce878 service nova] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Received event network-vif-plugged-b1b06f49-6fc4-41e9-893c-66416177b761 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 592.150397] env[62507]: DEBUG oslo_concurrency.lockutils [req-6dab8cfa-162e-4470-8cb8-8f77b728fb66 req-1c2caa75-5071-4bb5-84b0-0633a30ce878 service nova] Acquiring lock "1dcce6af-d9f7-4a24-97c7-4b0425c39d68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.150609] env[62507]: DEBUG oslo_concurrency.lockutils [req-6dab8cfa-162e-4470-8cb8-8f77b728fb66 req-1c2caa75-5071-4bb5-84b0-0633a30ce878 service nova] Lock "1dcce6af-d9f7-4a24-97c7-4b0425c39d68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.150800] env[62507]: DEBUG oslo_concurrency.lockutils [req-6dab8cfa-162e-4470-8cb8-8f77b728fb66 req-1c2caa75-5071-4bb5-84b0-0633a30ce878 service nova] Lock "1dcce6af-d9f7-4a24-97c7-4b0425c39d68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.150979] env[62507]: DEBUG nova.compute.manager [req-6dab8cfa-162e-4470-8cb8-8f77b728fb66 req-1c2caa75-5071-4bb5-84b0-0633a30ce878 service nova] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] No waiting events found dispatching network-vif-plugged-b1b06f49-6fc4-41e9-893c-66416177b761 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 592.151517] env[62507]: WARNING nova.compute.manager [req-6dab8cfa-162e-4470-8cb8-8f77b728fb66 req-1c2caa75-5071-4bb5-84b0-0633a30ce878 service nova] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Received unexpected event network-vif-plugged-b1b06f49-6fc4-41e9-893c-66416177b761 for instance with vm_state building and task_state spawning. [ 592.644580] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459933, 'name': CreateVM_Task, 'duration_secs': 0.422896} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.645930] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 592.645930] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.645930] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.646504] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 592.646504] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48b3f77f-1d2e-4860-88fb-79ebad726ed2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.654276] env[62507]: DEBUG oslo_vmware.api [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Waiting for the task: (returnval){ [ 592.654276] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5263ec54-dba1-de6d-6581-865dbc87936a" [ 592.654276] env[62507]: _type = "Task" [ 592.654276] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.664611] env[62507]: DEBUG oslo_vmware.api [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5263ec54-dba1-de6d-6581-865dbc87936a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.166049] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.166497] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 593.166861] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.312919] env[62507]: DEBUG nova.compute.manager [req-9c6b4717-76a2-46a4-b9f5-e31a6456510b req-e03fbfd3-742d-4ef7-9e6a-566dbf9fdff6 service nova] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Received event network-changed-b1b06f49-6fc4-41e9-893c-66416177b761 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 595.313385] env[62507]: DEBUG nova.compute.manager [req-9c6b4717-76a2-46a4-b9f5-e31a6456510b req-e03fbfd3-742d-4ef7-9e6a-566dbf9fdff6 service nova] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Refreshing instance network info cache due to event network-changed-b1b06f49-6fc4-41e9-893c-66416177b761. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 595.313385] env[62507]: DEBUG oslo_concurrency.lockutils [req-9c6b4717-76a2-46a4-b9f5-e31a6456510b req-e03fbfd3-742d-4ef7-9e6a-566dbf9fdff6 service nova] Acquiring lock "refresh_cache-1dcce6af-d9f7-4a24-97c7-4b0425c39d68" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.313703] env[62507]: DEBUG oslo_concurrency.lockutils [req-9c6b4717-76a2-46a4-b9f5-e31a6456510b req-e03fbfd3-742d-4ef7-9e6a-566dbf9fdff6 service nova] Acquired lock "refresh_cache-1dcce6af-d9f7-4a24-97c7-4b0425c39d68" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.313703] env[62507]: DEBUG nova.network.neutron [req-9c6b4717-76a2-46a4-b9f5-e31a6456510b req-e03fbfd3-742d-4ef7-9e6a-566dbf9fdff6 service nova] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Refreshing network info cache for port b1b06f49-6fc4-41e9-893c-66416177b761 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 595.955797] env[62507]: DEBUG oslo_concurrency.lockutils [None req-d1b461bd-95cc-4456-8da9-c6a08f85f887 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] Acquiring lock "1b8bc5c8-1f3a-409b-bdbc-bdad8e1966e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.956762] env[62507]: DEBUG oslo_concurrency.lockutils [None req-d1b461bd-95cc-4456-8da9-c6a08f85f887 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] Lock "1b8bc5c8-1f3a-409b-bdbc-bdad8e1966e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.991094] env[62507]: DEBUG nova.network.neutron [req-9c6b4717-76a2-46a4-b9f5-e31a6456510b req-e03fbfd3-742d-4ef7-9e6a-566dbf9fdff6 service nova] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Updated VIF entry in instance network info cache for port b1b06f49-6fc4-41e9-893c-66416177b761. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 595.991458] env[62507]: DEBUG nova.network.neutron [req-9c6b4717-76a2-46a4-b9f5-e31a6456510b req-e03fbfd3-742d-4ef7-9e6a-566dbf9fdff6 service nova] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Updating instance_info_cache with network_info: [{"id": "b1b06f49-6fc4-41e9-893c-66416177b761", "address": "fa:16:3e:84:c2:68", "network": {"id": "cc0e3936-b36b-4cb0-97da-e22f71a38b63", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-439428702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be102f0d924d4932a18e393e92705c8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1b06f49-6f", "ovs_interfaceid": "b1b06f49-6fc4-41e9-893c-66416177b761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.008579] env[62507]: DEBUG oslo_concurrency.lockutils [req-9c6b4717-76a2-46a4-b9f5-e31a6456510b req-e03fbfd3-742d-4ef7-9e6a-566dbf9fdff6 service nova] Releasing lock "refresh_cache-1dcce6af-d9f7-4a24-97c7-4b0425c39d68" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.637762] env[62507]: DEBUG oslo_concurrency.lockutils [None req-71381752-6e58-4e46-b0b4-137662f3c991 tempest-ServerMetadataTestJSON-665996216 tempest-ServerMetadataTestJSON-665996216-project-member] Acquiring lock "0eaa55ee-0619-456f-b35b-469c1ed7897d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.638100] env[62507]: DEBUG oslo_concurrency.lockutils [None req-71381752-6e58-4e46-b0b4-137662f3c991 tempest-ServerMetadataTestJSON-665996216 tempest-ServerMetadataTestJSON-665996216-project-member] Lock "0eaa55ee-0619-456f-b35b-469c1ed7897d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.403533] env[62507]: DEBUG oslo_concurrency.lockutils [None req-818f08ba-daee-425d-ba60-97a50301c6ea tempest-FloatingIPsAssociationTestJSON-1307269260 tempest-FloatingIPsAssociationTestJSON-1307269260-project-member] Acquiring lock "bcc0fbd8-e554-488a-8a12-732d7db1a4b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.403840] env[62507]: DEBUG oslo_concurrency.lockutils [None req-818f08ba-daee-425d-ba60-97a50301c6ea tempest-FloatingIPsAssociationTestJSON-1307269260 
tempest-FloatingIPsAssociationTestJSON-1307269260-project-member] Lock "bcc0fbd8-e554-488a-8a12-732d7db1a4b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.514656] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a203c354-212c-469f-b8b8-abd83a09ada0 tempest-InstanceActionsV221TestJSON-258376973 tempest-InstanceActionsV221TestJSON-258376973-project-member] Acquiring lock "6d751e02-64bb-41bb-9ded-30db9b885c2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.515612] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a203c354-212c-469f-b8b8-abd83a09ada0 tempest-InstanceActionsV221TestJSON-258376973 tempest-InstanceActionsV221TestJSON-258376973-project-member] Lock "6d751e02-64bb-41bb-9ded-30db9b885c2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.872683] env[62507]: DEBUG oslo_concurrency.lockutils [None req-d8ba961c-4562-40e1-a234-9711d7dc8c66 tempest-ServerActionsV293TestJSON-328365227 tempest-ServerActionsV293TestJSON-328365227-project-member] Acquiring lock "ba7b892b-a955-419f-b46e-e9631150a264" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.873105] env[62507]: DEBUG oslo_concurrency.lockutils [None req-d8ba961c-4562-40e1-a234-9711d7dc8c66 tempest-ServerActionsV293TestJSON-328365227 tempest-ServerActionsV293TestJSON-328365227-project-member] Lock "ba7b892b-a955-419f-b46e-e9631150a264" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.701623] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4174627b-465f-4638-9bb5-2df5203ae66d tempest-ServersTestManualDisk-2088113835 tempest-ServersTestManualDisk-2088113835-project-member] Acquiring lock "a1380ba5-64df-4b21-a80b-96c6d9d80f73" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.701767] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4174627b-465f-4638-9bb5-2df5203ae66d tempest-ServersTestManualDisk-2088113835 tempest-ServersTestManualDisk-2088113835-project-member] Lock "a1380ba5-64df-4b21-a80b-96c6d9d80f73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.628942] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.664356] env[62507]: DEBUG 
oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.664356] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.664356] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.664356] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.664356] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 616.169444] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.169444] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.169444] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 616.169444] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 616.193520] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 616.195369] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 616.195369] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 616.195369] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 616.195369] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 616.195369] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 616.195629] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 616.195629] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 616.195629] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 616.195629] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 616.195629] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 616.195873] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.195873] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.208842] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.209070] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.209256] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.209442] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 616.210578] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd281af-962d-4be4-a9a4-411d9d5453d4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.221171] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb96104e-8e3a-45ac-a646-42f117761901 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.241334] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a908217-6adf-42c8-b1e8-bddeef7197d9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.250090] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50aee0e1-3596-4688-a74e-a9e27f932acb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.281057] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181092MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 616.281277] env[62507]: DEBUG 
oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.281593] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.385475] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fa608f4a-47e6-4904-af65-a82c107af979 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 616.385475] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 598b1fd3-d762-4625-9660-ccf76af2394c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 616.385475] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4144689d-05a1-4e7f-b159-75cbaef82333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 616.385475] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1b19cecd-2a04-4077-9758-9947a3bcb4c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 616.385710] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c31b20a2-11aa-4d64-a8c6-2d8f889f1560 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 616.385710] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 616.385710] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 616.385710] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance dc241495-c9b7-4f2f-895d-e25008cc738a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 616.385844] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 63f63029-d01a-4d55-9753-95b93b7155cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 616.385844] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1dcce6af-d9f7-4a24-97c7-4b0425c39d68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 616.412361] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.439213] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 498b6bd7-03d8-44e7-b007-27d86afcb028 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.450591] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.461180] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c5a731c8-f9a7-4a4f-a69a-a429d99b80fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.472499] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 6316a65c-b75f-4432-9bd9-5e9833c18af3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.486251] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 6122b5aa-dde8-4999-b611-6695d01545c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.500390] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b8a89a8f-9a7d-4d95-bf9b-7b1c6dea5600 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.511398] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance cb9d87f8-b8c4-4dbf-93ed-dab6988fae48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.523985] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 765af4c8-ddae-4d08-a49f-217e038e5555 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.534404] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01bfc053-50f3-4813-8e4b-aceba0d2440f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.546978] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fbfd64cf-fa8e-48c0-9410-e3da6080f163 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.557729] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d86cd3ea-23d1-488e-acc6-bb4b4b666247 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.568785] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4a3639c7-8795-4702-a729-8239b0d55d51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.579489] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1b8bc5c8-1f3a-409b-bdbc-bdad8e1966e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.591290] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 0eaa55ee-0619-456f-b35b-469c1ed7897d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.603229] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance bcc0fbd8-e554-488a-8a12-732d7db1a4b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.614444] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 6d751e02-64bb-41bb-9ded-30db9b885c2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.625919] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ba7b892b-a955-419f-b46e-e9631150a264 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.636465] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a1380ba5-64df-4b21-a80b-96c6d9d80f73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.636901] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 616.636901] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 617.082658] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb2d342-49f9-4bfe-bc6b-29bcb1400ea0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.090552] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25eaa09f-447c-4dd1-905f-41ab8f951788 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.146604] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55bfee69-9a2a-4fa2-bbe4-11bca9e78314 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.155994] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94ad027-39de-42e7-9135-bfe99c3edf6c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.176126] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.195810] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 617.244735] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 617.244735] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.963s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.216936] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 631.139649] env[62507]: DEBUG oslo_concurrency.lockutils [None req-efee469a-ca1a-4f27-95b8-964da8ac7e33 tempest-AttachVolumeTestJSON-1962259627 tempest-AttachVolumeTestJSON-1962259627-project-member] Acquiring lock "f8dcc4b2-c1f5-42e5-be12-77647f526cb1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.139987] env[62507]: DEBUG oslo_concurrency.lockutils [None req-efee469a-ca1a-4f27-95b8-964da8ac7e33 tempest-AttachVolumeTestJSON-1962259627 tempest-AttachVolumeTestJSON-1962259627-project-member] Lock "f8dcc4b2-c1f5-42e5-be12-77647f526cb1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.592779] env[62507]: WARNING oslo_vmware.rw_handles [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 634.592779] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 634.592779] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 634.592779] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 634.592779] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 634.592779] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 634.592779] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 634.592779] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 634.592779] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 634.592779] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 634.592779] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 634.592779] env[62507]: ERROR oslo_vmware.rw_handles [ 634.593627] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Downloaded image file data 
601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/f79fb68d-bc80-4c6b-b25b-ad25d36e81ab/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 634.595093] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 634.595338] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Copying Virtual Disk [datastore2] vmware_temp/f79fb68d-bc80-4c6b-b25b-ad25d36e81ab/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/f79fb68d-bc80-4c6b-b25b-ad25d36e81ab/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 634.595623] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49f65b5b-9b8c-47a9-b84e-6e1a1e4638df {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.603382] env[62507]: DEBUG oslo_vmware.api [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Waiting for the task: (returnval){ [ 634.603382] env[62507]: value = "task-2459938" [ 634.603382] env[62507]: _type = "Task" [ 634.603382] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.615071] env[62507]: DEBUG oslo_vmware.api [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Task: {'id': task-2459938, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.114352] env[62507]: DEBUG oslo_vmware.exceptions [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 635.114616] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.115186] env[62507]: ERROR nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 635.115186] env[62507]: Faults: ['InvalidArgument'] [ 635.115186] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] Traceback (most recent call last): [ 635.115186] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 635.115186] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] yield resources [ 635.115186] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 635.115186] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] self.driver.spawn(context, instance, image_meta, [ 635.115186] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 635.115186] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] self._vmops.spawn(context, instance, image_meta, injected_files, [ 635.115186] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 635.115186] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] self._fetch_image_if_missing(context, vi) [ 635.115186] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 635.116071] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] image_cache(vi, tmp_image_ds_loc) [ 635.116071] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 635.116071] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] vm_util.copy_virtual_disk( [ 635.116071] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 635.116071] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] session._wait_for_task(vmdk_copy_task) [ 635.116071] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] 
File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 635.116071] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] return self.wait_for_task(task_ref) [ 635.116071] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 635.116071] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] return evt.wait() [ 635.116071] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 635.116071] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] result = hub.switch() [ 635.116071] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 635.116071] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] return self.greenlet.switch() [ 635.116755] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 635.116755] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] self.f(*self.args, **self.kw) [ 635.116755] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 635.116755] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] raise exceptions.translate_fault(task_info.error) [ 635.116755] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 635.116755] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] Faults: ['InvalidArgument'] [ 635.116755] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] [ 635.116755] env[62507]: INFO nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Terminating instance [ 635.117917] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.117917] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 635.118039] env[62507]: DEBUG nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 
tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 635.118155] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 635.118500] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b0374d0-cd92-41c2-8b23-64c650784fa0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.120628] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffdd5ef0-4d6b-4641-9816-f05e83d76e36 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.127398] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 635.127603] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d7ca3ef-f90c-464e-981d-f44d6cd759eb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.129707] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 635.130734] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 635.130868] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a0f71b8-1698-49b2-b288-445626cae120 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.136138] env[62507]: DEBUG oslo_vmware.api [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Waiting for the task: (returnval){ [ 635.136138] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52789ebd-9c6b-7efd-4391-67b36b2c3739" [ 635.136138] env[62507]: _type = "Task" [ 635.136138] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.142457] env[62507]: DEBUG oslo_vmware.api [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52789ebd-9c6b-7efd-4391-67b36b2c3739, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.196507] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 635.196907] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 635.197419] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Deleting the datastore file [datastore2] fa608f4a-47e6-4904-af65-a82c107af979 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 635.197504] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c362e0b-e601-4d7f-962e-6c9154a69c70 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.204193] env[62507]: DEBUG oslo_vmware.api [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Waiting for the task: (returnval){ [ 635.204193] env[62507]: value = "task-2459940" [ 635.204193] env[62507]: _type = "Task" [ 635.204193] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.212179] env[62507]: DEBUG oslo_vmware.api [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Task: {'id': task-2459940, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.647335] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 635.647624] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Creating directory with path [datastore2] vmware_temp/65f1852f-a545-46e4-a3e4-70c0aab51607/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 635.647889] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8801683e-f317-48af-a681-7ce6478f4286 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.659789] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Created directory with path [datastore2] vmware_temp/65f1852f-a545-46e4-a3e4-70c0aab51607/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 635.660015] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Fetch image to [datastore2] vmware_temp/65f1852f-a545-46e4-a3e4-70c0aab51607/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 635.660172] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/65f1852f-a545-46e4-a3e4-70c0aab51607/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 635.661020] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f724af4e-29be-4435-8bdf-b937288cd67c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.668150] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5756267f-f7bd-4da7-8673-6218818aa8d9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.677404] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebe362c-9353-4357-b808-fc49ce8d684a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.710067] env[62507]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc5783a2-6942-4c9b-8ad2-6da651dd22ae {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.717367] env[62507]: DEBUG oslo_vmware.api [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Task: {'id': task-2459940, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076443} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.718918] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 635.719160] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 635.719345] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 635.719526] env[62507]: INFO nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Took 0.60 seconds to destroy the instance on the hypervisor. 
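The entries above show the pattern used for every vCenter side effect in this log: a *_Task method (CopyVirtualDisk_Task, UnregisterVM, DeleteDatastoreFile_Task) is invoked through the oslo.vmware session, and the returned task reference is polled until it completes or raises a translated fault. Below is a minimal sketch of that invoke-and-poll pattern using only the public oslo.vmware session API; the host, credentials, datastore path, and datacenter reference are placeholders, not values from this deployment.

# Sketch of the oslo.vmware task pattern seen in the log above.
# All connection details and arguments are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test',      # placeholder vCenter host
    'user', 'secret',       # placeholder credentials
    10,                     # api_retry_count
    0.5)                    # task_poll_interval, drives the periodic polling

# Invoking a *_Task method returns a task reference rather than a result.
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task',
    session.vim.service_content.fileManager,
    name='[datastore2] vmware_temp/example.vmdk',  # placeholder path
    datacenter=None)  # placeholder; a real call passes a Datacenter moref

# wait_for_task() polls the task server-side and raises a translated
# exception (e.g. VimFaultException for an InvalidArgument fault) on error.
session.wait_for_task(task)

Each "progress is 0%" line logged at oslo_vmware/api.py:434 corresponds to one poll iteration of this loop; successful completion is the "completed successfully" line at api.py:444, which also reports duration_secs.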
[ 635.721329] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-eb272d36-361e-4fc0-9d5b-ba9301b6a8ea {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.723341] env[62507]: DEBUG nova.compute.claims [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 635.723520] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.723731] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.748947] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 635.828200] env[62507]: DEBUG oslo_vmware.rw_handles [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/65f1852f-a545-46e4-a3e4-70c0aab51607/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 635.898510] env[62507]: DEBUG oslo_vmware.rw_handles [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 635.898698] env[62507]: DEBUG oslo_vmware.rw_handles [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/65f1852f-a545-46e4-a3e4-70c0aab51607/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 636.205767] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44e84fb-b82f-4bc9-a452-d27c5c3333c9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.213306] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16ae1a0-c731-4a23-83c0-ffdd19a4b4c9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.244146] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec43a9d-9311-4ca4-9824-6428d4026ffc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.251542] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c909d092-fd7f-4d41-990c-d57a5072266c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.264730] env[62507]: DEBUG nova.compute.provider_tree [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.274070] env[62507]: DEBUG nova.scheduler.client.report [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 636.293255] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.569s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.293808] env[62507]: ERROR nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 636.293808] env[62507]: Faults: ['InvalidArgument'] [ 636.293808] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] Traceback (most recent call last): [ 636.293808] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] 
File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 636.293808] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] self.driver.spawn(context, instance, image_meta, [ 636.293808] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 636.293808] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] self._vmops.spawn(context, instance, image_meta, injected_files, [ 636.293808] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 636.293808] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] self._fetch_image_if_missing(context, vi) [ 636.293808] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 636.293808] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] image_cache(vi, tmp_image_ds_loc) [ 636.293808] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 636.294089] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] vm_util.copy_virtual_disk( [ 636.294089] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 636.294089] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] session._wait_for_task(vmdk_copy_task) [ 636.294089] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 636.294089] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] return self.wait_for_task(task_ref) [ 636.294089] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 636.294089] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] return evt.wait() [ 636.294089] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 636.294089] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] result = hub.switch() [ 636.294089] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 636.294089] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] return self.greenlet.switch() [ 636.294089] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 636.294089] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] self.f(*self.args, **self.kw) [ 636.294426] env[62507]: ERROR 
nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 636.294426] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] raise exceptions.translate_fault(task_info.error) [ 636.294426] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 636.294426] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] Faults: ['InvalidArgument'] [ 636.294426] env[62507]: ERROR nova.compute.manager [instance: fa608f4a-47e6-4904-af65-a82c107af979] [ 636.294687] env[62507]: DEBUG nova.compute.utils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 636.299386] env[62507]: DEBUG nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Build of instance fa608f4a-47e6-4904-af65-a82c107af979 was re-scheduled: A specified parameter was not correct: fileType [ 636.299386] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 636.299781] env[62507]: DEBUG nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 636.299959] env[62507]: DEBUG nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
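{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}

The traceback above bottoms out in oslo.vmware's task poller: wait_for_task() parks the caller on an event while a looping call polls the VMDK copy task, and once vCenter reports the task as errored, the fault is translated into a VimFaultException (here 'InvalidArgument' on fileType), which then propagates back up through spawn(). A minimal sketch of that polling pattern follows; it is illustrative only, not the library's actual code, and poll_task_info is a hypothetical callable standing in for the vCenter TaskInfo round trip.

import time

class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException (sketch only)."""
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

def wait_for_task(poll_task_info, interval=0.5):
    # poll_task_info is a hypothetical callable returning an object with
    # .state ('running' | 'success' | 'error') and .error = (fault, message).
    while True:
        info = poll_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            fault, message = info.error
            # Translate the VIM fault into a Python exception, as the
            # traceback above shows _poll_task doing via translate_fault().
            raise VimFaultException([fault], message)
        time.sleep(interval)

# Example: a task that immediately reports the fault seen above.
class _Info:
    state = 'error'
    error = ('InvalidArgument', 'A specified parameter was not correct: fileType')

# wait_for_task(lambda: _Info())  # raises VimFaultException(['InvalidArgument'], ...)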
[ 636.300150] env[62507]: DEBUG nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 636.300332] env[62507]: DEBUG nova.network.neutron [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 636.887875] env[62507]: DEBUG nova.network.neutron [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.904308] env[62507]: INFO nova.compute.manager [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] [instance: fa608f4a-47e6-4904-af65-a82c107af979] Took 0.60 seconds to deallocate network for instance. [ 637.028748] env[62507]: INFO nova.scheduler.client.report [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Deleted allocations for instance fa608f4a-47e6-4904-af65-a82c107af979 [ 637.052875] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c6d7f65-8ba8-4502-8ecf-6f9fcc9c6737 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813 tempest-FloatingIPsAssociationNegativeTestJSON-1684553813-project-member] Lock "fa608f4a-47e6-4904-af65-a82c107af979" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.142s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.083801] env[62507]: DEBUG nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 637.142495] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.142684] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.144212] env[62507]: INFO nova.compute.claims [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.527016] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46168d2f-996e-4000-9732-b9f16585eff1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.534539] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85331524-a6c8-4eeb-ac21-c4c18943f76a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.563922] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281c9016-8013-4bee-85a1-cc1afd9569a7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.571532] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048e3d25-b5d6-4d14-a763-f0f37ce62163 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.584379] env[62507]: DEBUG nova.compute.provider_tree [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.593096] env[62507]: DEBUG nova.scheduler.client.report [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 637.607076] env[62507]: DEBUG oslo_concurrency.lockutils [None 
req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.464s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.607566] env[62507]: DEBUG nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 637.645926] env[62507]: DEBUG nova.compute.utils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 637.647196] env[62507]: DEBUG nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 637.647412] env[62507]: DEBUG nova.network.neutron [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 637.656198] env[62507]: DEBUG nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 637.722121] env[62507]: DEBUG nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 637.737916] env[62507]: DEBUG nova.policy [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df0b12531a3e46e4a97a8d4082d6868e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13cb14d09e6f4d84996e4470f4e24eeb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 637.751364] env[62507]: DEBUG nova.virt.hardware [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 637.751664] env[62507]: DEBUG nova.virt.hardware [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 637.751866] env[62507]: DEBUG nova.virt.hardware [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 637.752113] env[62507]: DEBUG nova.virt.hardware [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 637.752309] env[62507]: DEBUG nova.virt.hardware [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 637.752509] env[62507]: DEBUG nova.virt.hardware [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 637.752763] env[62507]: DEBUG nova.virt.hardware [None 
req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 637.752968] env[62507]: DEBUG nova.virt.hardware [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 637.753231] env[62507]: DEBUG nova.virt.hardware [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 637.753489] env[62507]: DEBUG nova.virt.hardware [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 637.753717] env[62507]: DEBUG nova.virt.hardware [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 637.754705] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc9f4a4-84d7-4235-91e0-7a0039cf68c6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.763801] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423c66ed-5b7c-499c-8b1b-3a6deab48122 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.173032] env[62507]: DEBUG nova.network.neutron [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Successfully created port: ad83992c-c7f3-4542-b965-fcb3c0aac441 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 639.031441] env[62507]: DEBUG nova.network.neutron [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Successfully updated port: ad83992c-c7f3-4542-b965-fcb3c0aac441 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 639.043844] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "refresh_cache-9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.043986] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 
tempest-DeleteServersTestJSON-2119130755-project-member] Acquired lock "refresh_cache-9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.044161] env[62507]: DEBUG nova.network.neutron [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 639.122966] env[62507]: DEBUG nova.network.neutron [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 639.228370] env[62507]: DEBUG nova.compute.manager [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Received event network-vif-plugged-ad83992c-c7f3-4542-b965-fcb3c0aac441 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 639.228370] env[62507]: DEBUG oslo_concurrency.lockutils [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] Acquiring lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.228370] env[62507]: DEBUG oslo_concurrency.lockutils [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] Lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.228370] env[62507]: DEBUG oslo_concurrency.lockutils [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] Lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.228970] env[62507]: DEBUG nova.compute.manager [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] No waiting events found dispatching network-vif-plugged-ad83992c-c7f3-4542-b965-fcb3c0aac441 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 639.228970] env[62507]: WARNING nova.compute.manager [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Received unexpected event network-vif-plugged-ad83992c-c7f3-4542-b965-fcb3c0aac441 for instance with vm_state building and task_state spawning. 
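
The lock names in the records above point at Nova's per-instance event latch: an external event handler pops an entry under the "<instance>-events" lock when Neutron reports network-vif-plugged, and because the spawn path had not yet registered a waiter for that event, it is flagged as unexpected (the WARNING above) rather than silently dropped. A minimal sketch of that prepare/pop pattern follows; it mirrors the InstanceEvents.pop_instance_event calls visible in the lock names, but the class body here is an illustration, not Nova's implementation.

import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}          # (instance_id, event_name) -> Event
        self._lock = threading.Lock()

    def prepare(self, instance_id, event_name):
        # Called by the path that expects an external event (e.g. spawn
        # waiting for network-vif-plugged) before it starts waiting.
        latch = threading.Event()
        with self._lock:
            self._waiters[(instance_id, event_name)] = latch
        return latch

    def pop(self, instance_id, event_name):
        # Called when the external event actually arrives. If nothing has
        # registered a waiter yet, the event is reported as unexpected,
        # matching the WARNING in the records above.
        with self._lock:
            latch = self._waiters.pop((instance_id, event_name), None)
        if latch is None:
            print(f"unexpected event {event_name} for {instance_id}")
            return False
        latch.set()                 # wake the waiter
        return True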
[ 639.228970] env[62507]: DEBUG nova.compute.manager [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Received event network-changed-ad83992c-c7f3-4542-b965-fcb3c0aac441 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 639.228970] env[62507]: DEBUG nova.compute.manager [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Refreshing instance network info cache due to event network-changed-ad83992c-c7f3-4542-b965-fcb3c0aac441. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 639.230382] env[62507]: DEBUG oslo_concurrency.lockutils [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] Acquiring lock "refresh_cache-9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.361756] env[62507]: DEBUG nova.network.neutron [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Updating instance_info_cache with network_info: [{"id": "ad83992c-c7f3-4542-b965-fcb3c0aac441", "address": "fa:16:3e:a3:c8:57", "network": {"id": "0b8e342b-a3cc-46ec-b49a-d67e3624a14b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1312490043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cb14d09e6f4d84996e4470f4e24eeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad83992c-c7", "ovs_interfaceid": "ad83992c-c7f3-4542-b965-fcb3c0aac441", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.378934] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Releasing lock "refresh_cache-9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.379361] env[62507]: DEBUG nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Instance network_info: |[{"id": "ad83992c-c7f3-4542-b965-fcb3c0aac441", "address": "fa:16:3e:a3:c8:57", "network": {"id": "0b8e342b-a3cc-46ec-b49a-d67e3624a14b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1312490043-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cb14d09e6f4d84996e4470f4e24eeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad83992c-c7", "ovs_interfaceid": "ad83992c-c7f3-4542-b965-fcb3c0aac441", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 639.380737] env[62507]: DEBUG oslo_concurrency.lockutils [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] Acquired lock "refresh_cache-9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.381063] env[62507]: DEBUG nova.network.neutron [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Refreshing network info cache for port ad83992c-c7f3-4542-b965-fcb3c0aac441 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 639.383146] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:c8:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ef746c57-cd18-4883-a0e9-c52937aaf41d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad83992c-c7f3-4542-b965-fcb3c0aac441', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 639.400607] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Creating folder: Project (13cb14d09e6f4d84996e4470f4e24eeb). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 639.403772] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3c322ea-c725-4d40-8743-8034e0fa066f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.418489] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Created folder: Project (13cb14d09e6f4d84996e4470f4e24eeb) in parent group-v497991. [ 639.418812] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Creating folder: Instances. Parent ref: group-v498029. 
{{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 639.419187] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b0d6006-85bc-4c89-8f78-eeff06cc8617 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.431499] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Created folder: Instances in parent group-v498029. [ 639.431823] env[62507]: DEBUG oslo.service.loopingcall [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 639.432443] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 639.432797] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bebf82dc-6fe0-4b2f-95aa-b97bb76b48ab {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.454817] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 639.454817] env[62507]: value = "task-2459943" [ 639.454817] env[62507]: _type = "Task" [ 639.454817] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.464362] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459943, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.914329] env[62507]: DEBUG nova.network.neutron [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Updated VIF entry in instance network info cache for port ad83992c-c7f3-4542-b965-fcb3c0aac441. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 639.915473] env[62507]: DEBUG nova.network.neutron [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Updating instance_info_cache with network_info: [{"id": "ad83992c-c7f3-4542-b965-fcb3c0aac441", "address": "fa:16:3e:a3:c8:57", "network": {"id": "0b8e342b-a3cc-46ec-b49a-d67e3624a14b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1312490043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cb14d09e6f4d84996e4470f4e24eeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad83992c-c7", "ovs_interfaceid": "ad83992c-c7f3-4542-b965-fcb3c0aac441", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.928537] env[62507]: DEBUG oslo_concurrency.lockutils [req-fd1c0163-1aef-4d77-adcb-78adf278d10b req-fef3c895-5b8c-40ec-8279-687f2875b246 service nova] Releasing lock "refresh_cache-9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.969397] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459943, 'name': CreateVM_Task, 'duration_secs': 0.295658} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.969736] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 639.970472] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.970708] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.971084] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 639.971374] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95e28135-7217-4fe3-81d5-8f10b6da58b8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.976244] env[62507]: DEBUG oslo_vmware.api [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for the task: (returnval){ [ 639.976244] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52364c92-c0f2-21fb-bac1-dd8c54115b32" [ 639.976244] env[62507]: _type = "Task" [ 639.976244] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.985856] env[62507]: DEBUG oslo_vmware.api [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52364c92-c0f2-21fb-bac1-dd8c54115b32, 'name': SearchDatastore_Task} progress is 0%. 
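{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}

The records around this point show the per-image serialization for the datastore image cache: a lock and external semaphore scoped to [datastore2] devstack-image-cache_base/601dc712-…, a SearchDatastore_Task to test whether the cached VMDK already exists, and only then the decision to process the image. A minimal sketch of that check-under-lock pattern follows; search_datastore and fetch_image are hypothetical callables standing in for the vSphere round trips, and the lock registry is a simplification of the oslo_concurrency machinery.

from contextlib import contextmanager
import threading

_image_locks = {}
_registry_lock = threading.Lock()

@contextmanager
def image_lock(key):
    # Serialize all workers touching the same cached image.
    with _registry_lock:
        lock = _image_locks.setdefault(key, threading.Lock())
    with lock:
        yield

def fetch_image_if_missing(image_id, search_datastore, fetch_image):
    key = f"[datastore2] devstack-image-cache_base/{image_id}"
    with image_lock(key):
        # Probe the datastore for an already-cached copy first.
        if search_datastore(f"{image_id}/{image_id}.vmdk"):
            return "cache hit"
        fetch_image(image_id)       # download + cache only on a miss
        return "cache miss, fetched"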
[ 640.486209] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.486480] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 640.486672] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.800190] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Acquiring lock "e682e67f-5a36-4851-b870-7099d7db119d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.800515] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Lock "e682e67f-5a36-4851-b870-7099d7db119d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.168446] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 675.168739] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.163579] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.167279] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.167477] env[62507]: DEBUG oslo_service.periodic_task [None 
req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.168015] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.168252] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 677.168426] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.181040] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.181309] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.181516] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.181709] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 677.182890] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362e43b9-1015-417e-b590-a2d00c8f9fc6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.192639] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e4868a-3498-437d-82d6-6f991c1b0200 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.206072] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6fd767b-ec34-4c76-ad2c-f7725ca44186 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.212371] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5b0c25-d094-473b-858a-701f83b3af8c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.241076] env[62507]: DEBUG 
nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181178MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 677.241227] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.241427] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.334980] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 598b1fd3-d762-4625-9660-ccf76af2394c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.335167] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4144689d-05a1-4e7f-b159-75cbaef82333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.335310] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1b19cecd-2a04-4077-9758-9947a3bcb4c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.335440] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c31b20a2-11aa-4d64-a8c6-2d8f889f1560 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.335562] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.335682] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.335848] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance dc241495-c9b7-4f2f-895d-e25008cc738a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.336018] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 63f63029-d01a-4d55-9753-95b93b7155cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.336146] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1dcce6af-d9f7-4a24-97c7-4b0425c39d68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.336265] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 677.348686] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 498b6bd7-03d8-44e7-b007-27d86afcb028 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.359324] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.369438] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c5a731c8-f9a7-4a4f-a69a-a429d99b80fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.380445] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 6316a65c-b75f-4432-9bd9-5e9833c18af3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.392939] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 6122b5aa-dde8-4999-b611-6695d01545c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.403125] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b8a89a8f-9a7d-4d95-bf9b-7b1c6dea5600 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.412580] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance cb9d87f8-b8c4-4dbf-93ed-dab6988fae48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.421586] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 765af4c8-ddae-4d08-a49f-217e038e5555 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.431686] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01bfc053-50f3-4813-8e4b-aceba0d2440f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.441265] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fbfd64cf-fa8e-48c0-9410-e3da6080f163 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.450603] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d86cd3ea-23d1-488e-acc6-bb4b4b666247 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 677.459569] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4a3639c7-8795-4702-a729-8239b0d55d51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 677.469477] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1b8bc5c8-1f3a-409b-bdbc-bdad8e1966e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 677.480149] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 0eaa55ee-0619-456f-b35b-469c1ed7897d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 677.489465] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance bcc0fbd8-e554-488a-8a12-732d7db1a4b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 677.498545] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 6d751e02-64bb-41bb-9ded-30db9b885c2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 677.510575] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ba7b892b-a955-419f-b46e-e9631150a264 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 677.520618] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a1380ba5-64df-4b21-a80b-96c6d9d80f73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 677.530648] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f8dcc4b2-c1f5-42e5-be12-77647f526cb1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 677.543040] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e682e67f-5a36-4851-b870-7099d7db119d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 677.543040] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 677.543040] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 677.881599] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7aaf8b4-6286-4c1d-8113-2a1da90a381a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 677.889413] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f9c18b-1b28-4b31-a73c-da81c4919018 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 677.918259] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2abcb2f-e46f-4ee3-8318-5513bc66e420 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 677.924917] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7200137e-af8f-4564-963e-a92fc91c954b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 677.938061] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 677.946682] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 677.960516] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 677.960516] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.719s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 678.960689] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 678.960689] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 678.960689] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 678.980734] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 678.980906] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 678.981043] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 678.981179] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 678.981307] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 678.981431] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 678.981581] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 678.981728] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 678.981859] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 678.981981] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 678.982124] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
[ 678.982607] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 684.609326] env[62507]: WARNING oslo_vmware.rw_handles [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 684.609326] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 684.609326] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 684.609326] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 684.609326] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 684.609326] env[62507]: ERROR oslo_vmware.rw_handles response.begin()
[ 684.609326] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 684.609326] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 684.609326] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 684.609326] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 684.609326] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 684.609326] env[62507]: ERROR oslo_vmware.rw_handles
[ 684.610478] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/65f1852f-a545-46e4-a3e4-70c0aab51607/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 684.611341] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 684.611625] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Copying Virtual Disk [datastore2] vmware_temp/65f1852f-a545-46e4-a3e4-70c0aab51607/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/65f1852f-a545-46e4-a3e4-70c0aab51607/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 684.611900] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a812f893-9d78-4e42-a7c7-162e1aa086c7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 684.620285] env[62507]: DEBUG oslo_vmware.api [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Waiting for the task: (returnval){
[ 684.620285] env[62507]: value = "task-2459944"
[ 684.620285] env[62507]: _type = "Task"
[ 684.620285] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 684.627955] env[62507]: DEBUG oslo_vmware.api [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Task: {'id': task-2459944, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 685.132649] env[62507]: DEBUG oslo_vmware.exceptions [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 685.132933] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 685.133505] env[62507]: ERROR nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 685.133505] env[62507]: Faults: ['InvalidArgument']
[ 685.133505] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Traceback (most recent call last):
[ 685.133505] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 685.133505] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] yield resources
[ 685.133505] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 685.133505] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] self.driver.spawn(context, instance, image_meta,
[ 685.133505] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 685.133505] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 685.133505] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 685.133505] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] self._fetch_image_if_missing(context, vi)
[ 685.133505] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 685.133849] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] image_cache(vi, tmp_image_ds_loc)
[ 685.133849] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 685.133849] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] vm_util.copy_virtual_disk(
[ 685.133849] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 685.133849] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] session._wait_for_task(vmdk_copy_task)
[ 685.133849] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 685.133849] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] return self.wait_for_task(task_ref)
[ 685.133849] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 685.133849] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] return evt.wait()
[ 685.133849] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 685.133849] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] result = hub.switch()
[ 685.133849] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 685.133849] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] return self.greenlet.switch()
[ 685.134155] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 685.134155] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] self.f(*self.args, **self.kw)
[ 685.134155] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 685.134155] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] raise exceptions.translate_fault(task_info.error)
[ 685.134155] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 685.134155] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Faults: ['InvalidArgument']
[ 685.134155] env[62507]: ERROR nova.compute.manager [instance:
4144689d-05a1-4e7f-b159-75cbaef82333] [ 685.134155] env[62507]: INFO nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Terminating instance [ 685.136026] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.136026] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 685.136391] env[62507]: DEBUG nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 685.136581] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 685.136804] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48bd30df-b4c0-4cf2-ad63-00387d5d1ad4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.139202] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f83318-bae5-4afd-8c39-215e79e3f96c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.146117] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 685.146374] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec9f0c20-497a-4613-958d-c70e9138993c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.148575] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 685.148742] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 
tempest-DeleteServersAdminTestJSON-695666790-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 685.149705] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45e3e049-3220-40de-92de-ab8effeb62e4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.154384] env[62507]: DEBUG oslo_vmware.api [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Waiting for the task: (returnval){ [ 685.154384] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52fff140-f5df-bb50-59d2-affa8b27e833" [ 685.154384] env[62507]: _type = "Task" [ 685.154384] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.161490] env[62507]: DEBUG oslo_vmware.api [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52fff140-f5df-bb50-59d2-affa8b27e833, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.232268] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 685.232550] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 685.232635] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Deleting the datastore file [datastore2] 4144689d-05a1-4e7f-b159-75cbaef82333 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 685.232893] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ccc0fdbe-b7a7-40b7-8923-5a91a4e2f7a2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.238754] env[62507]: DEBUG oslo_vmware.api [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Waiting for the task: (returnval){ [ 685.238754] env[62507]: value = "task-2459946" [ 685.238754] env[62507]: _type = "Task" [ 685.238754] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.246227] env[62507]: DEBUG oslo_vmware.api [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Task: {'id': task-2459946, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.665384] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 685.665681] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Creating directory with path [datastore2] vmware_temp/d7486069-672d-4d67-8ae6-b72e554ff961/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 685.665903] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f371e3fd-3984-4382-90ca-9c091e5f3540 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.678626] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Created directory with path [datastore2] vmware_temp/d7486069-672d-4d67-8ae6-b72e554ff961/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 685.678838] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Fetch image to [datastore2] vmware_temp/d7486069-672d-4d67-8ae6-b72e554ff961/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 685.679029] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/d7486069-672d-4d67-8ae6-b72e554ff961/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 685.679816] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfae472a-1c6c-4189-9cba-8ee5066c5a2c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.687163] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255e769b-38b2-42d0-bbe4-8c0044f9d34e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.696836] 
env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6c5dd2-2a9b-46d5-b45f-ce9f7813da3f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.738609] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0972b2a3-6d75-42be-846d-6dd020f21165 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.751972] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ecc820a6-b44e-4827-bed5-00d98d5e9840 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.753896] env[62507]: DEBUG oslo_vmware.api [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Task: {'id': task-2459946, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067438} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.754349] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 685.754349] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 685.754509] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 685.754669] env[62507]: INFO nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Took 0.62 seconds to destroy the instance on the hypervisor. 
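Editor's note: the resource-tracker records earlier in this excerpt hang together arithmetically. The per-instance footprint shown in the heal-skip records is {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} (the m1.nano flavor that appears later in the trace), and with ten allocated instances plus the 512 MB reserved in the MEMORY_MB inventory, the logged "Final resource view" falls out directly. The sketch below is mine; in particular, treating used_ram as folding in the reservation is an inference from the numbers, not something the log states.

```python
# Sketch: re-derive "used_ram=1792MB used_disk=10GB used_vcpus=10" from
# the per-instance footprint and inventory reservation seen in this log.
per_instance = {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}
instances = 10                 # "Total usable vcpus: 48, total allocated vcpus: 10"
reserved_ram_mb = 512          # MEMORY_MB inventory: 'reserved': 512

used_ram_mb = instances * per_instance['MEMORY_MB'] + reserved_ram_mb
used_disk_gb = instances * per_instance['DISK_GB']
used_vcpus = instances * per_instance['VCPU']

# Matches the logged final resource view.
assert (used_ram_mb, used_disk_gb, used_vcpus) == (1792, 10, 10)
```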
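The CopyVirtualDisk_Task and DeleteDatastoreFile_Task waits in this trace all follow one shape: wait_for_task blocks while a poller periodically fetches task state, logs "progress is N%.", and either returns on success (with duration_secs) or raises a translated fault, which is exactly the VimFaultException path seen above. A library-free sketch of that control flow follows; the fake task dicts and the FakeVimFault name are mine, not oslo.vmware's API.

```python
import time

class FakeVimFault(Exception):
    """Stand-in for the translated fault the real poller raises."""

def wait_for_task(fetch_info, interval=0.5, timeout=300.0):
    # Poll until the task reaches a terminal state, logging progress
    # along the way, roughly as the log's poller does.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise FakeVimFault(info.get('error', 'task failed'))
        print(f"Task: {info['id']} ({info['name']}) progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
    raise TimeoutError('task did not reach a terminal state')

# Usage: a task that succeeds on the second poll.
states = iter([
    {'id': 'task-2459944', 'name': 'CopyVirtualDisk_Task', 'state': 'running', 'progress': 0},
    {'id': 'task-2459944', 'name': 'CopyVirtualDisk_Task', 'state': 'success', 'progress': 100},
])
print(wait_for_task(lambda: next(states), interval=0.01))
```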
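Every primary record in this dump follows the same layout: a bracketed uptime timestamp, an env tag, a level, the emitting module, optional request/instance context, the message, and a {{(pid=...) function /path:line}} suffix. When auditing dumps like this one, a small parser helps; the regex and field names below are mine, derived only from the layout visible here (continuation lines such as traceback frames deliberately return None).

```python
import re

# Field names are illustrative; only the record layout comes from this log.
RECORD = re.compile(
    r"\[\s*(?P<ts>\d+\.\d+)\]\s+"             # [ 685.754669]
    r"env\[(?P<env>\d+)\]:\s+"                # env[62507]:
    r"(?P<level>DEBUG|INFO|WARNING|ERROR)\s+" # log level
    r"(?P<module>\S+)\s+"                     # e.g. nova.compute.manager
    r"(?P<rest>.*)$"                          # context, message, {{(pid) ...}} suffix
)

def parse_record(line: str) -> dict | None:
    m = RECORD.match(line)
    return m.groupdict() if m else None

sample = ("[ 685.754669] env[62507]: INFO nova.compute.manager "
          "[instance: 4144689d-05a1-4e7f-b159-75cbaef82333] "
          "Took 0.62 seconds to destroy the instance on the hypervisor.")
print(parse_record(sample)["module"])  # nova.compute.manager
```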
[ 685.757313] env[62507]: DEBUG nova.compute.claims [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 685.757483] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.757693] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.778323] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 685.841761] env[62507]: DEBUG oslo_vmware.rw_handles [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d7486069-672d-4d67-8ae6-b72e554ff961/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 685.904982] env[62507]: DEBUG oslo_vmware.rw_handles [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 685.905192] env[62507]: DEBUG oslo_vmware.rw_handles [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d7486069-672d-4d67-8ae6-b72e554ff961/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 686.265039] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7cd5f1-512e-4628-9544-43b5725aee84 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 686.272140] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea72e0a5-67d8-4cf4-9d3f-a4643a76607e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 686.301578] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f0802c-182d-4300-860a-b251a107c35f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 686.309301] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5288372e-39ff-4032-a47a-caa532a72db4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 686.323434] env[62507]: DEBUG nova.compute.provider_tree [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 686.331934] env[62507]: DEBUG nova.scheduler.client.report [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 686.345225] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.587s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 686.345794] env[62507]: ERROR nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 686.345794] env[62507]: Faults: ['InvalidArgument']
[ 686.345794] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Traceback (most recent call last):
[ 686.345794] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 686.345794] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] self.driver.spawn(context, instance, image_meta,
[ 686.345794] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 686.345794] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 686.345794] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 686.345794] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] self._fetch_image_if_missing(context, vi)
[ 686.345794] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 686.345794] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] image_cache(vi, tmp_image_ds_loc)
[ 686.345794] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 686.346186] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] vm_util.copy_virtual_disk(
[ 686.346186] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 686.346186] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] session._wait_for_task(vmdk_copy_task)
[ 686.346186] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 686.346186] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] return self.wait_for_task(task_ref)
[ 686.346186] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 686.346186] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] return evt.wait()
[ 686.346186] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 686.346186] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] result = hub.switch()
[ 686.346186] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 686.346186] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] return self.greenlet.switch()
[ 686.346186] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 686.346186] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] self.f(*self.args, **self.kw)
[ 686.346519] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 686.346519] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] raise exceptions.translate_fault(task_info.error)
[ 686.346519] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 686.346519] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Faults: ['InvalidArgument']
[ 686.346519] env[62507]: ERROR nova.compute.manager [instance: 4144689d-05a1-4e7f-b159-75cbaef82333]
[ 686.346519] env[62507]: DEBUG nova.compute.utils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 686.347963] env[62507]: DEBUG nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Build of instance 4144689d-05a1-4e7f-b159-75cbaef82333 was re-scheduled: A specified parameter was not correct: fileType
[ 686.347963] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 686.348358] env[62507]: DEBUG nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 686.348535] env[62507]: DEBUG nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 686.348690] env[62507]: DEBUG nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 686.348850] env[62507]: DEBUG nova.network.neutron [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 686.699305] env[62507]: DEBUG nova.network.neutron [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.714265] env[62507]: INFO nova.compute.manager [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] [instance: 4144689d-05a1-4e7f-b159-75cbaef82333] Took 0.37 seconds to deallocate network for instance. [ 686.844756] env[62507]: INFO nova.scheduler.client.report [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Deleted allocations for instance 4144689d-05a1-4e7f-b159-75cbaef82333 [ 686.875086] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c7c38635-f12b-4214-95e2-94a14b173eb9 tempest-ServerDiagnosticsNegativeTest-1015129097 tempest-ServerDiagnosticsNegativeTest-1015129097-project-member] Lock "4144689d-05a1-4e7f-b159-75cbaef82333" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 143.772s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.894725] env[62507]: DEBUG nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 686.947579] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.947975] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.950983] env[62507]: INFO nova.compute.claims [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 687.361469] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bd9fb1-c2a8-4495-946c-66866924f122 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.369708] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0592e1-25f5-4113-b74f-ac4f400ba908 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.399748] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea756b1e-5cc4-460c-9e29-3c31f60d5452 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.407156] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd471de-9430-4cd9-a92a-91972a5d82ee {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.419914] env[62507]: DEBUG nova.compute.provider_tree [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.428477] env[62507]: DEBUG nova.scheduler.client.report [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 687.442781] env[62507]: DEBUG oslo_concurrency.lockutils [None 
req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.495s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.443199] env[62507]: DEBUG nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 687.473186] env[62507]: DEBUG nova.compute.utils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 687.474357] env[62507]: DEBUG nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 687.474523] env[62507]: DEBUG nova.network.neutron [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 687.483587] env[62507]: DEBUG nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 687.543952] env[62507]: DEBUG nova.policy [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9356cc6a11f4151b2f47ece3afacfd5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be102f0d924d4932a18e393e92705c8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 687.550774] env[62507]: DEBUG nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 687.575370] env[62507]: DEBUG nova.virt.hardware [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 687.575624] env[62507]: DEBUG nova.virt.hardware [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 687.575782] env[62507]: DEBUG nova.virt.hardware [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.575967] env[62507]: DEBUG nova.virt.hardware [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 687.576134] env[62507]: DEBUG nova.virt.hardware [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.576309] env[62507]: DEBUG nova.virt.hardware [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 687.576564] env[62507]: DEBUG nova.virt.hardware [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 687.576747] env[62507]: DEBUG nova.virt.hardware [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 687.576919] env[62507]: DEBUG nova.virt.hardware [None 
req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 687.578138] env[62507]: DEBUG nova.virt.hardware [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 687.578359] env[62507]: DEBUG nova.virt.hardware [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 687.579221] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a65fb61-b3b6-4450-8bb6-97f2fa01b846 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.587386] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa08c937-f771-4c75-8890-90b59bfceeef {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.948028] env[62507]: DEBUG nova.network.neutron [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Successfully created port: cc4429be-03c7-4727-9b21-34ef04db1152 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 688.804222] env[62507]: DEBUG nova.network.neutron [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Successfully updated port: cc4429be-03c7-4727-9b21-34ef04db1152 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 688.823011] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "refresh_cache-498b6bd7-03d8-44e7-b007-27d86afcb028" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 688.823119] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquired lock "refresh_cache-498b6bd7-03d8-44e7-b007-27d86afcb028" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.823196] env[62507]: DEBUG nova.network.neutron [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 688.894249] env[62507]: DEBUG nova.network.neutron [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 
tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 689.046848] env[62507]: DEBUG nova.compute.manager [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Received event network-vif-plugged-cc4429be-03c7-4727-9b21-34ef04db1152 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 689.047094] env[62507]: DEBUG oslo_concurrency.lockutils [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] Acquiring lock "498b6bd7-03d8-44e7-b007-27d86afcb028-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.047333] env[62507]: DEBUG oslo_concurrency.lockutils [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] Lock "498b6bd7-03d8-44e7-b007-27d86afcb028-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.047459] env[62507]: DEBUG oslo_concurrency.lockutils [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] Lock "498b6bd7-03d8-44e7-b007-27d86afcb028-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.047627] env[62507]: DEBUG nova.compute.manager [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] No waiting events found dispatching network-vif-plugged-cc4429be-03c7-4727-9b21-34ef04db1152 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 689.047829] env[62507]: WARNING nova.compute.manager [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Received unexpected event network-vif-plugged-cc4429be-03c7-4727-9b21-34ef04db1152 for instance with vm_state building and task_state spawning. [ 689.048028] env[62507]: DEBUG nova.compute.manager [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Received event network-changed-cc4429be-03c7-4727-9b21-34ef04db1152 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 689.048228] env[62507]: DEBUG nova.compute.manager [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Refreshing instance network info cache due to event network-changed-cc4429be-03c7-4727-9b21-34ef04db1152. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 689.048439] env[62507]: DEBUG oslo_concurrency.lockutils [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] Acquiring lock "refresh_cache-498b6bd7-03d8-44e7-b007-27d86afcb028" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 689.161523] env[62507]: DEBUG nova.network.neutron [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Updating instance_info_cache with network_info: [{"id": "cc4429be-03c7-4727-9b21-34ef04db1152", "address": "fa:16:3e:7e:a9:8e", "network": {"id": "cc0e3936-b36b-4cb0-97da-e22f71a38b63", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-439428702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be102f0d924d4932a18e393e92705c8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc4429be-03", "ovs_interfaceid": "cc4429be-03c7-4727-9b21-34ef04db1152", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.180444] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Releasing lock "refresh_cache-498b6bd7-03d8-44e7-b007-27d86afcb028" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 689.180734] env[62507]: DEBUG nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Instance network_info: |[{"id": "cc4429be-03c7-4727-9b21-34ef04db1152", "address": "fa:16:3e:7e:a9:8e", "network": {"id": "cc0e3936-b36b-4cb0-97da-e22f71a38b63", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-439428702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be102f0d924d4932a18e393e92705c8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapcc4429be-03", "ovs_interfaceid": "cc4429be-03c7-4727-9b21-34ef04db1152", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 689.181038] env[62507]: DEBUG oslo_concurrency.lockutils [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] Acquired lock "refresh_cache-498b6bd7-03d8-44e7-b007-27d86afcb028" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.181224] env[62507]: DEBUG nova.network.neutron [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Refreshing network info cache for port cc4429be-03c7-4727-9b21-34ef04db1152 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 689.182403] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:a9:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60bdba1a-14cf-46b2-9d8b-aeaf4d80c815', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc4429be-03c7-4727-9b21-34ef04db1152', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 689.190346] env[62507]: DEBUG oslo.service.loopingcall [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 689.193121] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 689.193904] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-274f0879-fea8-4628-b07b-f828b8441223 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.213375] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.213375] env[62507]: value = "task-2459947" [ 689.213375] env[62507]: _type = "Task" [ 689.213375] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.222620] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459947, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.727461] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459947, 'name': CreateVM_Task, 'duration_secs': 0.290697} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.727883] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 689.728715] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 689.729056] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.730101] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 689.730101] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e10640f1-279a-42c1-84d7-813a0287b5e0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.735997] env[62507]: DEBUG oslo_vmware.api [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Waiting for the task: (returnval){ [ 689.735997] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52cd14e4-f1f9-125e-e076-8212a155bfb5" [ 689.735997] env[62507]: _type = "Task" [ 689.735997] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.746802] env[62507]: DEBUG oslo_vmware.api [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52cd14e4-f1f9-125e-e076-8212a155bfb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.753652] env[62507]: DEBUG nova.network.neutron [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Updated VIF entry in instance network info cache for port cc4429be-03c7-4727-9b21-34ef04db1152. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 689.753946] env[62507]: DEBUG nova.network.neutron [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Updating instance_info_cache with network_info: [{"id": "cc4429be-03c7-4727-9b21-34ef04db1152", "address": "fa:16:3e:7e:a9:8e", "network": {"id": "cc0e3936-b36b-4cb0-97da-e22f71a38b63", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-439428702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be102f0d924d4932a18e393e92705c8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc4429be-03", "ovs_interfaceid": "cc4429be-03c7-4727-9b21-34ef04db1152", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.765123] env[62507]: DEBUG oslo_concurrency.lockutils [req-aa06cf34-3c29-470a-95be-31fa6b4c08bc req-a2531bf6-0be9-42b6-94c3-c7f32a3603a3 service nova] Releasing lock "refresh_cache-498b6bd7-03d8-44e7-b007-27d86afcb028" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.248949] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.248949] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 690.248949] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.732828] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Acquiring lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.733187] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.814053] env[62507]: WARNING oslo_vmware.rw_handles [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 731.814053] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 731.814053] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 731.814053] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 731.814053] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 731.814053] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 731.814053] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 731.814053] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 731.814053] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 731.814053] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 731.814053] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 731.814053] env[62507]: ERROR oslo_vmware.rw_handles [ 731.814720] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/d7486069-672d-4d67-8ae6-b72e554ff961/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 731.816238] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 731.816506] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Copying Virtual Disk [datastore2] vmware_temp/d7486069-672d-4d67-8ae6-b72e554ff961/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/d7486069-672d-4d67-8ae6-b72e554ff961/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 731.816789] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d25c4c8-b0e4-4dfb-97b2-92db2fa53e6c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.824528] env[62507]: DEBUG oslo_vmware.api [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Waiting for the task: (returnval){ [ 731.824528] env[62507]: value = "task-2459948" [ 731.824528] env[62507]: _type = "Task" [ 731.824528] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.833550] env[62507]: DEBUG oslo_vmware.api [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Task: {'id': task-2459948, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.335500] env[62507]: DEBUG oslo_vmware.exceptions [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 732.335839] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.336399] env[62507]: ERROR nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 732.336399] env[62507]: Faults: ['InvalidArgument'] [ 732.336399] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Traceback (most recent call last): [ 732.336399] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 732.336399] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] yield resources [ 732.336399] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 732.336399] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] self.driver.spawn(context, instance, image_meta, [ 732.336399] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 732.336399] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 732.336399] env[62507]: ERROR 
nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 732.336399] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] self._fetch_image_if_missing(context, vi) [ 732.336399] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 732.336718] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] image_cache(vi, tmp_image_ds_loc) [ 732.336718] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 732.336718] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] vm_util.copy_virtual_disk( [ 732.336718] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 732.336718] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] session._wait_for_task(vmdk_copy_task) [ 732.336718] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 732.336718] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] return self.wait_for_task(task_ref) [ 732.336718] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 732.336718] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] return evt.wait() [ 732.336718] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 732.336718] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] result = hub.switch() [ 732.336718] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 732.336718] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] return self.greenlet.switch() [ 732.337038] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 732.337038] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] self.f(*self.args, **self.kw) [ 732.337038] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 732.337038] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] raise exceptions.translate_fault(task_info.error) [ 732.337038] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 732.337038] env[62507]: ERROR nova.compute.manager [instance: 
598b1fd3-d762-4625-9660-ccf76af2394c] Faults: ['InvalidArgument'] [ 732.337038] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] [ 732.337038] env[62507]: INFO nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Terminating instance [ 732.338270] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.339087] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 732.339724] env[62507]: DEBUG nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 732.339916] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 732.340163] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a1af75c-7d72-4b0f-ba63-8b9b65f8e94f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.342595] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f122b8f-9916-47a6-b278-ad90226f22d6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.349195] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 732.349403] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e387f9c1-377d-469f-8fbf-3f3ceaf6c82f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.351537] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 732.351715] 
env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 732.352671] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca4bf136-8ddc-4d40-9425-687d7fad932c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.357134] env[62507]: DEBUG oslo_vmware.api [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Waiting for the task: (returnval){ [ 732.357134] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]523c79b8-45ba-fb3f-681b-f45f954f95a8" [ 732.357134] env[62507]: _type = "Task" [ 732.357134] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.364437] env[62507]: DEBUG oslo_vmware.api [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]523c79b8-45ba-fb3f-681b-f45f954f95a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.427614] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 732.427851] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 732.428051] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Deleting the datastore file [datastore2] 598b1fd3-d762-4625-9660-ccf76af2394c {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 732.428330] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8eba912b-c606-4654-885d-26b2df62f0c9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.434678] env[62507]: DEBUG oslo_vmware.api [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Waiting for the task: (returnval){ [ 732.434678] env[62507]: value = "task-2459950" [ 732.434678] env[62507]: _type = "Task" [ 732.434678] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.443484] env[62507]: DEBUG oslo_vmware.api [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Task: {'id': task-2459950, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.867811] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 732.868116] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Creating directory with path [datastore2] vmware_temp/6225046d-bac6-4596-a1c9-0b86920f1404/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 732.868329] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cdab8d3a-3900-432d-a4c1-1a7a14ff4d04 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.880674] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Created directory with path [datastore2] vmware_temp/6225046d-bac6-4596-a1c9-0b86920f1404/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 732.880674] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Fetch image to [datastore2] vmware_temp/6225046d-bac6-4596-a1c9-0b86920f1404/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 732.880674] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/6225046d-bac6-4596-a1c9-0b86920f1404/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 732.881201] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d16842b-1c5d-4785-8184-a1337e38acbd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.888491] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce72eb7-a68c-4776-8ab1-e7b6078327ef {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.897599] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3295ebb-10d3-4434-bd6b-8c1f983ad7b7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.928566] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53417a59-2d80-4481-abbb-364a34b94678 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.934273] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a1d0f450-fb10-4156-a73f-ece935b3980a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.943316] env[62507]: DEBUG oslo_vmware.api [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Task: {'id': task-2459950, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065792} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.943546] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 732.943725] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 732.943896] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 732.944087] env[62507]: INFO nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 732.946267] env[62507]: DEBUG nova.compute.claims [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 732.946445] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.946672] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.955929] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 733.008107] env[62507]: DEBUG oslo_vmware.rw_handles [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6225046d-bac6-4596-a1c9-0b86920f1404/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 733.068451] env[62507]: DEBUG oslo_vmware.rw_handles [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 733.068636] env[62507]: DEBUG oslo_vmware.rw_handles [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6225046d-bac6-4596-a1c9-0b86920f1404/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 733.379457] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f7570b-86b9-4c55-9513-0d05edd94815 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.386957] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b600af7-f5f1-4802-8da6-0fcbc67d7f92 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.417845] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d0ec1b-0bb6-4159-b49c-a2c721d80999 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.424728] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f46350-8d1b-41d8-8e1d-0aeb14a744d7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.439035] env[62507]: DEBUG nova.compute.provider_tree [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.448312] env[62507]: DEBUG nova.scheduler.client.report [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 733.463706] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.517s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.464275] env[62507]: ERROR nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 733.464275] env[62507]: Faults: ['InvalidArgument'] [ 733.464275] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Traceback (most recent call last): [ 733.464275] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 733.464275] 
env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] self.driver.spawn(context, instance, image_meta, [ 733.464275] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 733.464275] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 733.464275] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 733.464275] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] self._fetch_image_if_missing(context, vi) [ 733.464275] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 733.464275] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] image_cache(vi, tmp_image_ds_loc) [ 733.464275] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 733.464707] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] vm_util.copy_virtual_disk( [ 733.464707] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 733.464707] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] session._wait_for_task(vmdk_copy_task) [ 733.464707] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 733.464707] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] return self.wait_for_task(task_ref) [ 733.464707] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 733.464707] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] return evt.wait() [ 733.464707] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 733.464707] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] result = hub.switch() [ 733.464707] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 733.464707] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] return self.greenlet.switch() [ 733.464707] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 733.464707] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] self.f(*self.args, **self.kw) [ 733.465062] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 733.465062] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] raise exceptions.translate_fault(task_info.error) [ 733.465062] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 733.465062] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Faults: ['InvalidArgument'] [ 733.465062] env[62507]: ERROR nova.compute.manager [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] [ 733.465062] env[62507]: DEBUG nova.compute.utils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 733.466469] env[62507]: DEBUG nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Build of instance 598b1fd3-d762-4625-9660-ccf76af2394c was re-scheduled: A specified parameter was not correct: fileType [ 733.466469] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 733.466838] env[62507]: DEBUG nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 733.467018] env[62507]: DEBUG nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 733.467180] env[62507]: DEBUG nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 733.467346] env[62507]: DEBUG nova.network.neutron [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 734.067977] env[62507]: DEBUG nova.network.neutron [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.081635] env[62507]: INFO nova.compute.manager [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 598b1fd3-d762-4625-9660-ccf76af2394c] Took 0.61 seconds to deallocate network for instance. [ 734.190665] env[62507]: INFO nova.scheduler.client.report [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Deleted allocations for instance 598b1fd3-d762-4625-9660-ccf76af2394c [ 734.216385] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c8f607c-c084-4478-a570-61b87be97212 tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Lock "598b1fd3-d762-4625-9660-ccf76af2394c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 191.903s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.233331] env[62507]: DEBUG nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 734.284980] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.284980] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.286495] env[62507]: INFO nova.compute.claims [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 734.671466] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1318ea52-17cf-4168-be95-1c5bb4306f66 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.679514] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fbdd4c-8268-481a-ba0e-1b4dc6e93465 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.709668] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9448dc-1c2e-406b-b06b-f000ebb15996 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.716586] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4190823-2f8f-4e6d-ae9f-b081088e6eda {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.732472] env[62507]: DEBUG nova.compute.provider_tree [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.741757] env[62507]: DEBUG nova.scheduler.client.report [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 734.760028] env[62507]: DEBUG 
oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.475s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.760565] env[62507]: DEBUG nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 734.805403] env[62507]: DEBUG nova.compute.utils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 734.806702] env[62507]: DEBUG nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 734.806882] env[62507]: DEBUG nova.network.neutron [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 734.815738] env[62507]: DEBUG nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 734.881521] env[62507]: DEBUG nova.policy [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c6444e9620b49c78a27ec70ba7b3a00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad19115f1f8c4504b869916adf44c28e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 734.887418] env[62507]: DEBUG nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Start spawning the instance on the hypervisor. 
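Annotation: the instance claim above succeeds against the inventory record that placement reports for this node. As a back-of-the-envelope check (a plain-Python sketch, not Nova or placement source), placement derives schedulable capacity per resource class as (total - reserved) * allocation_ratio, while max_unit caps what any single allocation may request:

```python
# Minimal sketch of how the inventory dict logged above turns into
# schedulable capacity. Values are copied from the log entry; the
# formula is placement's standard capacity rule.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 145},
}

def capacity(rec):
    # Effective capacity: (total - reserved) * allocation_ratio.
    return int((rec['total'] - rec['reserved']) * rec['allocation_ratio'])

for rc, rec in inventory.items():
    # max_unit still limits a single allocation regardless of capacity.
    print(rc, capacity(rec), 'max per allocation:', rec['max_unit'])
# VCPU 192, MEMORY_MB 196078, DISK_GB 400
```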
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 734.907693] env[62507]: DEBUG nova.virt.hardware [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 734.908359] env[62507]: DEBUG nova.virt.hardware [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 734.908553] env[62507]: DEBUG nova.virt.hardware [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 734.908754] env[62507]: DEBUG nova.virt.hardware [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 734.909080] env[62507]: DEBUG nova.virt.hardware [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 734.909260] env[62507]: DEBUG nova.virt.hardware [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 734.909479] env[62507]: DEBUG nova.virt.hardware [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 734.909644] env[62507]: DEBUG nova.virt.hardware [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 734.909815] env[62507]: DEBUG nova.virt.hardware [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 734.910095] env[62507]: DEBUG nova.virt.hardware [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 734.910186] env[62507]: DEBUG nova.virt.hardware [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 734.911047] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0ea5c5-6dbb-4894-81a6-59b87408b57f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.920062] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d3db2b-06f5-4a5f-8afb-73fda420ce47 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.167092] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.200943] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.334314] env[62507]: DEBUG nova.network.neutron [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Successfully created port: 6edd7d77-0f18-45ea-b487-85b1b3f42544 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 735.367238] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a38be65d-2feb-4cfc-ad9f-48e9cd6d8b1a tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Acquiring lock "26bd2ef7-5d8b-4924-9bc6-2dfc7c59e667" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.367449] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a38be65d-2feb-4cfc-ad9f-48e9cd6d8b1a tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Lock "26bd2ef7-5d8b-4924-9bc6-2dfc7c59e667" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner 
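Annotation: the topology walk above (limits 65536:65536:65536, 1 vCPU, exactly one possible topology) can be reproduced by brute-force enumeration. The sketch below loosely mirrors what nova.virt.hardware's _get_possible_cpu_topologies computes for this trivial case; it is illustrative, not the actual implementation:

```python
# Enumerate every (sockets, cores, threads) split whose product equals
# the vCPU count, subject to per-dimension maxima. With vcpus=1 and the
# 65536 defaults seen in the log, only (1, 1, 1) survives.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    topologies.append((s, c, t))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)] -> "Got 1 possible topologies"
```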
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.167492] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.199870] env[62507]: DEBUG nova.network.neutron [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Successfully updated port: 6edd7d77-0f18-45ea-b487-85b1b3f42544 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 736.211229] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Acquiring lock "refresh_cache-2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.211412] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Acquired lock "refresh_cache-2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.211578] env[62507]: DEBUG nova.network.neutron [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 736.265568] env[62507]: DEBUG nova.network.neutron [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Instance cache missing network info. 
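Annotation: the Acquiring/Acquired pairs on lock "refresh_cache-&lt;uuid&gt;" above use oslo.concurrency's named-lock API to serialize rebuilds of the per-instance network info cache. A minimal sketch of that generic pattern (requires oslo.concurrency; the surrounding Nova logic is omitted):

```python
# Generic oslo.concurrency named-lock pattern, as seen throughout this
# log. lockutils.lock() is the real API; the body here is a placeholder.
from oslo_concurrency import lockutils

instance_uuid = "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29"  # from the log above

with lockutils.lock(f"refresh_cache-{instance_uuid}"):
    # Rebuild the instance's network info cache while holding the lock,
    # so concurrent event handlers see a consistent view.
    pass
```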
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 736.420885] env[62507]: DEBUG nova.compute.manager [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Received event network-vif-plugged-6edd7d77-0f18-45ea-b487-85b1b3f42544 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 736.421132] env[62507]: DEBUG oslo_concurrency.lockutils [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] Acquiring lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.421341] env[62507]: DEBUG oslo_concurrency.lockutils [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] Lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.421596] env[62507]: DEBUG oslo_concurrency.lockutils [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] Lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.421756] env[62507]: DEBUG nova.compute.manager [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] No waiting events found dispatching network-vif-plugged-6edd7d77-0f18-45ea-b487-85b1b3f42544 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 736.421928] env[62507]: WARNING nova.compute.manager [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Received unexpected event network-vif-plugged-6edd7d77-0f18-45ea-b487-85b1b3f42544 for instance with vm_state building and task_state spawning. [ 736.422313] env[62507]: DEBUG nova.compute.manager [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Received event network-changed-6edd7d77-0f18-45ea-b487-85b1b3f42544 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 736.422540] env[62507]: DEBUG nova.compute.manager [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Refreshing instance network info cache due to event network-changed-6edd7d77-0f18-45ea-b487-85b1b3f42544. 
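Annotation: the WARNING above ("Received unexpected event network-vif-plugged-... for instance with vm_state building") fires because no waiter had registered for that event before Neutron delivered it. A hypothetical sketch of the register-then-pop pattern; prepare_for_event and pop_instance_event are illustrative stand-ins, not Nova's exact signatures:

```python
# Register-then-pop event pattern: spawning code registers an event it
# intends to wait on; the external-event handler pops and signals it.
# If nothing is registered, the event is reported as unexpected.
import threading

_waiters: dict[str, threading.Event] = {}
_lock = threading.Lock()

def prepare_for_event(tag: str) -> threading.Event:
    ev = threading.Event()
    with _lock:
        _waiters[tag] = ev
    return ev

def pop_instance_event(tag: str) -> bool:
    with _lock:
        ev = _waiters.pop(tag, None)
    if ev is None:
        return False  # no waiter -> "Received unexpected event ..."
    ev.set()
    return True
```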
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 736.422759] env[62507]: DEBUG oslo_concurrency.lockutils [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] Acquiring lock "refresh_cache-2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.483945] env[62507]: DEBUG nova.network.neutron [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Updating instance_info_cache with network_info: [{"id": "6edd7d77-0f18-45ea-b487-85b1b3f42544", "address": "fa:16:3e:75:69:e0", "network": {"id": "195422c8-88a1-427b-bfbd-cdaa14bbd8d5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1643174965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad19115f1f8c4504b869916adf44c28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6edd7d77-0f", "ovs_interfaceid": "6edd7d77-0f18-45ea-b487-85b1b3f42544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.495878] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Releasing lock "refresh_cache-2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.496197] env[62507]: DEBUG nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Instance network_info: |[{"id": "6edd7d77-0f18-45ea-b487-85b1b3f42544", "address": "fa:16:3e:75:69:e0", "network": {"id": "195422c8-88a1-427b-bfbd-cdaa14bbd8d5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1643174965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad19115f1f8c4504b869916adf44c28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": 
"nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6edd7d77-0f", "ovs_interfaceid": "6edd7d77-0f18-45ea-b487-85b1b3f42544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 736.496511] env[62507]: DEBUG oslo_concurrency.lockutils [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] Acquired lock "refresh_cache-2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.496694] env[62507]: DEBUG nova.network.neutron [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Refreshing network info cache for port 6edd7d77-0f18-45ea-b487-85b1b3f42544 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 736.497795] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:69:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54c45719-5690-47bf-b45b-6cad9813071e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6edd7d77-0f18-45ea-b487-85b1b3f42544', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 736.505473] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Creating folder: Project (ad19115f1f8c4504b869916adf44c28e). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 736.508370] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3fa1f2c-366e-4b14-add7-1ca0cb6d0650 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.519372] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Created folder: Project (ad19115f1f8c4504b869916adf44c28e) in parent group-v497991. [ 736.519556] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Creating folder: Instances. Parent ref: group-v498033. 
{{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 736.519806] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ceae8495-b77e-4ceb-9dad-b70edd85be46 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.528268] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Created folder: Instances in parent group-v498033. [ 736.528491] env[62507]: DEBUG oslo.service.loopingcall [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 736.528668] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 736.528859] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5b9d2d9-40d2-450e-8c3f-8b7033b905c0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.548649] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 736.548649] env[62507]: value = "task-2459953" [ 736.548649] env[62507]: _type = "Task" [ 736.548649] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.556260] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459953, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.058688] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459953, 'name': CreateVM_Task, 'duration_secs': 0.329023} completed successfully. 
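Annotation: the "Waiting for the task" / "progress is 0%" / "completed successfully" sequence above is a poll loop on a vCenter task object. The real entry point is oslo_vmware.api.VMwareAPISession.wait_for_task; the sketch below is a simplified stand-in that accepts any poll callable:

```python
# Simplified task-poll loop in the shape of the log entries above:
# poll until the task reports success or error, sleeping between polls.
import time

def wait_for_task(poll_task, interval=0.5):
    while True:
        info = poll_task()            # e.g. {'state': 'running', 'progress': 0}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        time.sleep(interval)          # back off, then poll again
```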
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.058688] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 737.059144] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.059314] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.059619] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 737.059855] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7864dbcd-d782-46a2-8ba7-8a9693f9a9d1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.064040] env[62507]: DEBUG oslo_vmware.api [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Waiting for the task: (returnval){ [ 737.064040] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]521acd87-4f69-aabe-ca51-e83be6b53183" [ 737.064040] env[62507]: _type = "Task" [ 737.064040] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.072899] env[62507]: DEBUG oslo_vmware.api [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]521acd87-4f69-aabe-ca51-e83be6b53183, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.086033] env[62507]: DEBUG nova.network.neutron [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Updated VIF entry in instance network info cache for port 6edd7d77-0f18-45ea-b487-85b1b3f42544. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 737.086368] env[62507]: DEBUG nova.network.neutron [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Updating instance_info_cache with network_info: [{"id": "6edd7d77-0f18-45ea-b487-85b1b3f42544", "address": "fa:16:3e:75:69:e0", "network": {"id": "195422c8-88a1-427b-bfbd-cdaa14bbd8d5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1643174965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad19115f1f8c4504b869916adf44c28e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6edd7d77-0f", "ovs_interfaceid": "6edd7d77-0f18-45ea-b487-85b1b3f42544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.095915] env[62507]: DEBUG oslo_concurrency.lockutils [req-6e7f5a98-aa9a-4af5-a811-75428a4a8970 req-debc9408-04cd-4ce8-9f25-37f382d33b30 service nova] Releasing lock "refresh_cache-2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.162697] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.167282] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.167477] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.576165] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.576465] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Processing image 
601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.576635] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.167791] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.168045] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 739.168228] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 739.189903] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 739.190073] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 739.190214] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 739.190344] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 739.190471] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 739.190598] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 739.190721] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Skipping network cache update for instance because it is Building. 
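Annotation: the run of "Skipping network cache update for instance because it is Building" entries reduces to a simple state filter over the instance list before the heal pass. An illustrative sketch (the vm_state value is a standard Nova state; the loop is not the actual manager code):

```python
# Only instances past the build stage get their network info cache
# healed; with every instance still building, the pass finds nothing.
instances = [
    {'uuid': '2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29', 'vm_state': 'building'},
    {'uuid': '1b19cecd-2a04-4077-9758-9947a3bcb4c2', 'vm_state': 'building'},
]

to_heal = [i for i in instances if i['vm_state'] != 'building']
if not to_heal:
    print("Didn't find any instances for network info cache update.")
```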
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 739.190842] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 739.190963] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 739.191097] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 739.191222] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 739.192079] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.192255] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 739.192427] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.203057] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.203279] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.203479] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.203639] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 739.204787] env[62507]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1ae61b-3eae-4308-9a66-3f4eb33eb968 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.214425] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6642a27e-c433-4d95-95c2-294cdd170344 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.227085] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b02f0c6-ded7-4db1-8fd7-67bfb63e4f17 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.233528] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa697424-5ab8-46ef-9250-387a0ffbe902 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.263097] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181167MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 739.263097] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.263097] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.337223] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1b19cecd-2a04-4077-9758-9947a3bcb4c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 739.337435] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c31b20a2-11aa-4d64-a8c6-2d8f889f1560 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 739.337579] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 739.337708] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 739.337832] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance dc241495-c9b7-4f2f-895d-e25008cc738a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 739.337954] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 63f63029-d01a-4d55-9753-95b93b7155cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 739.338087] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1dcce6af-d9f7-4a24-97c7-4b0425c39d68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 739.338207] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 739.338324] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 498b6bd7-03d8-44e7-b007-27d86afcb028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 739.338439] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 739.349560] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c5a731c8-f9a7-4a4f-a69a-a429d99b80fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.363175] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 6316a65c-b75f-4432-9bd9-5e9833c18af3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.372629] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 6122b5aa-dde8-4999-b611-6695d01545c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.382395] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b8a89a8f-9a7d-4d95-bf9b-7b1c6dea5600 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.392563] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance cb9d87f8-b8c4-4dbf-93ed-dab6988fae48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.403919] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 765af4c8-ddae-4d08-a49f-217e038e5555 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.415091] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01bfc053-50f3-4813-8e4b-aceba0d2440f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.424789] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fbfd64cf-fa8e-48c0-9410-e3da6080f163 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.434605] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d86cd3ea-23d1-488e-acc6-bb4b4b666247 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.446047] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4a3639c7-8795-4702-a729-8239b0d55d51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.456995] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1b8bc5c8-1f3a-409b-bdbc-bdad8e1966e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.466501] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 0eaa55ee-0619-456f-b35b-469c1ed7897d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.476174] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance bcc0fbd8-e554-488a-8a12-732d7db1a4b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.485461] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 6d751e02-64bb-41bb-9ded-30db9b885c2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.496163] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ba7b892b-a955-419f-b46e-e9631150a264 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.507622] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a1380ba5-64df-4b21-a80b-96c6d9d80f73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.519032] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f8dcc4b2-c1f5-42e5-be12-77647f526cb1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.529238] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e682e67f-5a36-4851-b870-7099d7db119d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.539586] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a9b1ef96-1409-4700-a1bb-4aec1691a0fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.549309] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 26bd2ef7-5d8b-4924-9bc6-2dfc7c59e667 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
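Annotation: the "Final resource view" totals reported just below follow directly from the allocations listed above: ten actively managed instances at {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} each, plus the 512 MB of RAM reserved in inventory. A quick arithmetic check (a sketch, not Nova's resource tracker):

```python
# Ten m1.nano-sized claims plus the inventory's reserved RAM reproduce
# used_ram=1792MB, used_disk=10GB, used_vcpus=10 from the log.
claims = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 10
reserved_ram_mb = 512

used_ram = reserved_ram_mb + sum(c['MEMORY_MB'] for c in claims)
used_disk = sum(c['DISK_GB'] for c in claims)
used_vcpus = sum(c['VCPU'] for c in claims)
print(used_ram, used_disk, used_vcpus)  # 1792 10 10 -> matches the log
```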
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 739.549554] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 739.549702] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 739.897102] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e1032a-378f-48d7-adbb-a889446c1d86 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.904815] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357c0b35-56bb-4f83-adda-5452443f660a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.934850] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2c8efd-fc9c-40b4-80b3-fbb4bda7561f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.941904] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d709c24c-2ecd-4b5f-a723-c4d464855288 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.955318] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.963295] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 739.980380] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 739.980564] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.718s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.956102] env[62507]: DEBUG oslo_service.periodic_task [None 
req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 748.567863] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5e679be2-87b8-4b04-bc1e-49810950b44b tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Acquiring lock "c31b20a2-11aa-4d64-a8c6-2d8f889f1560" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.850207] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9ab61149-9074-46ee-b16f-b5f0d046f5b0 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Acquiring lock "3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.968873] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9fe059bf-bdf7-4ee2-a746-fc13895d7c80 tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Acquiring lock "1b19cecd-2a04-4077-9758-9947a3bcb4c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.374587] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a7e3fcef-de84-4781-8059-66933c155d4b tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.890771] env[62507]: DEBUG oslo_concurrency.lockutils [None req-41ed31c4-a158-4d66-acaf-507dac36cc28 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Acquiring lock "dc241495-c9b7-4f2f-895d-e25008cc738a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.700874] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02481543-bc1e-4655-bd6a-c7a57ed37d98 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Acquiring lock "63f63029-d01a-4d55-9753-95b93b7155cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.197663] env[62507]: DEBUG oslo_concurrency.lockutils [None req-79497b52-0c5d-4784-8ce3-447f5ea729b5 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.829072] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4706e1b1-54e0-485e-881e-f572ab22629b tempest-ServersAdminTestJSON-1645104492 
tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "498b6bd7-03d8-44e7-b007-27d86afcb028" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.436491] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b3b91df7-7511-4f1b-b485-1bd575dbea67 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Acquiring lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.403711] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.403711] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.500800] env[62507]: WARNING oslo_vmware.rw_handles [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 780.500800] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 780.500800] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 780.500800] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 780.500800] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 780.500800] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 780.500800] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 780.500800] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 780.500800] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 780.500800] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 780.500800] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 780.500800] env[62507]: ERROR oslo_vmware.rw_handles [ 780.501505] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Downloaded image file data 
601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/6225046d-bac6-4596-a1c9-0b86920f1404/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 780.504231] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 780.505027] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Copying Virtual Disk [datastore2] vmware_temp/6225046d-bac6-4596-a1c9-0b86920f1404/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/6225046d-bac6-4596-a1c9-0b86920f1404/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 780.505027] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c9f6d8e-d64a-4c40-9628-46888f2e0503 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.513236] env[62507]: DEBUG oslo_vmware.api [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Waiting for the task: (returnval){ [ 780.513236] env[62507]: value = "task-2459954" [ 780.513236] env[62507]: _type = "Task" [ 780.513236] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.522984] env[62507]: DEBUG oslo_vmware.api [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Task: {'id': task-2459954, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.023404] env[62507]: DEBUG oslo_vmware.exceptions [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 781.023699] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.024268] env[62507]: ERROR nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 781.024268] env[62507]: Faults: ['InvalidArgument'] [ 781.024268] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Traceback (most recent call last): [ 781.024268] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 781.024268] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] yield resources [ 781.024268] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 781.024268] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] self.driver.spawn(context, instance, image_meta, [ 781.024268] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 781.024268] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 781.024268] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 781.024268] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] self._fetch_image_if_missing(context, vi) [ 781.024268] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 781.024819] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] image_cache(vi, tmp_image_ds_loc) [ 781.024819] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 781.024819] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] vm_util.copy_virtual_disk( [ 781.024819] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 781.024819] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] session._wait_for_task(vmdk_copy_task) [ 781.024819] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 781.024819] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] return self.wait_for_task(task_ref) [ 781.024819] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 781.024819] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] return evt.wait() [ 781.024819] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 781.024819] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] result = hub.switch() [ 781.024819] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 781.024819] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] return self.greenlet.switch() [ 781.025394] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 781.025394] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] self.f(*self.args, **self.kw) [ 781.025394] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 781.025394] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] raise exceptions.translate_fault(task_info.error) [ 781.025394] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 781.025394] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Faults: ['InvalidArgument'] [ 781.025394] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] [ 781.025394] env[62507]: INFO nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Terminating instance [ 781.026236] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.026447] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.027127] env[62507]: DEBUG nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef 
tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 781.027325] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 781.027557] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02628b4a-af87-46f5-84df-54ebe1a63b1a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.029942] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44b03cb-2ac1-4fad-81c9-d8f88ecafea5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.038153] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 781.046088] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7558b206-1f73-4094-8285-2f9043f3dbd4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.046088] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.046088] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 781.046088] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc31519b-fadc-4def-9606-409535e49d43 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.053718] env[62507]: DEBUG oslo_vmware.api [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Waiting for the task: (returnval){ [ 781.053718] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e68db4-b292-9311-f79e-54f3f9ef1f9a" [ 781.053718] env[62507]: _type = "Task" [ 781.053718] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.063240] env[62507]: DEBUG oslo_vmware.api [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e68db4-b292-9311-f79e-54f3f9ef1f9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.117814] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 781.118054] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 781.118249] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Deleting the datastore file [datastore2] 1b19cecd-2a04-4077-9758-9947a3bcb4c2 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 781.118866] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cfeea114-3506-4720-865c-9f68cb018f52 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.128021] env[62507]: DEBUG oslo_vmware.api [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Waiting for the task: (returnval){ [ 781.128021] env[62507]: value = "task-2459956" [ 781.128021] env[62507]: _type = "Task" [ 781.128021] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.137446] env[62507]: DEBUG oslo_vmware.api [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Task: {'id': task-2459956, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.564768] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 781.565169] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Creating directory with path [datastore2] vmware_temp/90c341ba-36f6-4fc1-bc20-ba15f21a5131/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.565454] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a246a19f-bfc3-4226-8ce4-c1af57a18c41 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.580328] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Created directory with path [datastore2] vmware_temp/90c341ba-36f6-4fc1-bc20-ba15f21a5131/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.580543] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Fetch image to [datastore2] vmware_temp/90c341ba-36f6-4fc1-bc20-ba15f21a5131/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 781.580738] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/90c341ba-36f6-4fc1-bc20-ba15f21a5131/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 781.581783] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce793076-7299-4e05-a024-8accd9e0a0b8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.590330] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6ed475-8311-437f-8da7-b8c05bdcb60f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.600135] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b553a16d-4208-497e-bc5a-3c1c7028826e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.641114] env[62507]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1554cdf-b3b0-4b4a-b00a-d9cdb38a31d8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.649560] env[62507]: DEBUG oslo_vmware.api [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Task: {'id': task-2459956, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072048} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.651290] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 781.651486] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 781.651729] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 781.651931] env[62507]: INFO nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 781.653825] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-afb6f091-27e7-48d2-8faf-64ca03c3a0ee {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.658137] env[62507]: DEBUG nova.compute.claims [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 781.658137] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.658137] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.682114] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 781.764348] env[62507]: DEBUG oslo_vmware.rw_handles [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/90c341ba-36f6-4fc1-bc20-ba15f21a5131/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 781.836643] env[62507]: DEBUG oslo_vmware.rw_handles [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 781.836901] env[62507]: DEBUG oslo_vmware.rw_handles [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/90c341ba-36f6-4fc1-bc20-ba15f21a5131/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 782.168178] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd689be2-022a-43e3-aa31-4cfcfe94351e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.176008] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c69c6a-7951-4ea3-9fbb-82cf6f22f1ce {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.205457] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce43c452-7e48-4778-b899-8d79d464fbd3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.212992] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcc656d-0897-4786-ac7e-1f849c20215b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.227304] env[62507]: DEBUG nova.compute.provider_tree [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.241344] env[62507]: DEBUG nova.scheduler.client.report [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 782.263673] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.606s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.265139] env[62507]: ERROR nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 782.265139] env[62507]: Faults: ['InvalidArgument'] [ 782.265139] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Traceback (most recent call last): [ 782.265139] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/compute/manager.py", line 
2632, in _build_and_run_instance [ 782.265139] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] self.driver.spawn(context, instance, image_meta, [ 782.265139] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 782.265139] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 782.265139] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 782.265139] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] self._fetch_image_if_missing(context, vi) [ 782.265139] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 782.265139] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] image_cache(vi, tmp_image_ds_loc) [ 782.265139] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 782.265444] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] vm_util.copy_virtual_disk( [ 782.265444] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 782.265444] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] session._wait_for_task(vmdk_copy_task) [ 782.265444] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 782.265444] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] return self.wait_for_task(task_ref) [ 782.265444] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 782.265444] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] return evt.wait() [ 782.265444] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 782.265444] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] result = hub.switch() [ 782.265444] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 782.265444] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] return self.greenlet.switch() [ 782.265444] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 782.265444] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] self.f(*self.args, **self.kw) [ 782.266189] env[62507]: ERROR nova.compute.manager [instance: 
1b19cecd-2a04-4077-9758-9947a3bcb4c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 782.266189] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] raise exceptions.translate_fault(task_info.error) [ 782.266189] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 782.266189] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Faults: ['InvalidArgument'] [ 782.266189] env[62507]: ERROR nova.compute.manager [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] [ 782.266189] env[62507]: DEBUG nova.compute.utils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 782.267206] env[62507]: DEBUG nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Build of instance 1b19cecd-2a04-4077-9758-9947a3bcb4c2 was re-scheduled: A specified parameter was not correct: fileType [ 782.267206] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 782.267206] env[62507]: DEBUG nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 782.267206] env[62507]: DEBUG nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 782.267384] env[62507]: DEBUG nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 782.267633] env[62507]: DEBUG nova.network.neutron [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 782.987233] env[62507]: DEBUG nova.network.neutron [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.004095] env[62507]: INFO nova.compute.manager [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Took 0.74 seconds to deallocate network for instance. [ 783.113759] env[62507]: INFO nova.scheduler.client.report [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Deleted allocations for instance 1b19cecd-2a04-4077-9758-9947a3bcb4c2 [ 783.141630] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a3ab0e76-667b-448c-910f-ffe471b527ef tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Lock "1b19cecd-2a04-4077-9758-9947a3bcb4c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.724s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.141630] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9fe059bf-bdf7-4ee2-a746-fc13895d7c80 tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Lock "1b19cecd-2a04-4077-9758-9947a3bcb4c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 34.173s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.141791] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9fe059bf-bdf7-4ee2-a746-fc13895d7c80 tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Acquiring lock "1b19cecd-2a04-4077-9758-9947a3bcb4c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.142060] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9fe059bf-bdf7-4ee2-a746-fc13895d7c80 tempest-InstanceActionsNegativeTestJSON-2012257067 
tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Lock "1b19cecd-2a04-4077-9758-9947a3bcb4c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.142246] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9fe059bf-bdf7-4ee2-a746-fc13895d7c80 tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Lock "1b19cecd-2a04-4077-9758-9947a3bcb4c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.146748] env[62507]: INFO nova.compute.manager [None req-9fe059bf-bdf7-4ee2-a746-fc13895d7c80 tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Terminating instance [ 783.148644] env[62507]: DEBUG nova.compute.manager [None req-9fe059bf-bdf7-4ee2-a746-fc13895d7c80 tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 783.148843] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9fe059bf-bdf7-4ee2-a746-fc13895d7c80 tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 783.152877] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-944de23f-04a5-47af-bce7-4d66ae6cb16f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.163746] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9379b4e-68df-48a5-86a5-ee9d5c4d5f49 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.178083] env[62507]: DEBUG nova.compute.manager [None req-29b02f1d-c78a-4414-afbf-8114700422ba tempest-AttachInterfacesV270Test-1979904879 tempest-AttachInterfacesV270Test-1979904879-project-member] [instance: c5a731c8-f9a7-4a4f-a69a-a429d99b80fa] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 783.201749] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-9fe059bf-bdf7-4ee2-a746-fc13895d7c80 tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1b19cecd-2a04-4077-9758-9947a3bcb4c2 could not be found. 
[ 783.201749] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9fe059bf-bdf7-4ee2-a746-fc13895d7c80 tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 783.201749] env[62507]: INFO nova.compute.manager [None req-9fe059bf-bdf7-4ee2-a746-fc13895d7c80 tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Took 0.05 seconds to destroy the instance on the hypervisor. [ 783.201749] env[62507]: DEBUG oslo.service.loopingcall [None req-9fe059bf-bdf7-4ee2-a746-fc13895d7c80 tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 783.202131] env[62507]: DEBUG nova.compute.manager [-] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 783.202234] env[62507]: DEBUG nova.network.neutron [-] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 783.208800] env[62507]: DEBUG nova.compute.manager [None req-29b02f1d-c78a-4414-afbf-8114700422ba tempest-AttachInterfacesV270Test-1979904879 tempest-AttachInterfacesV270Test-1979904879-project-member] [instance: c5a731c8-f9a7-4a4f-a69a-a429d99b80fa] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 783.237891] env[62507]: DEBUG nova.network.neutron [-] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.239515] env[62507]: DEBUG oslo_concurrency.lockutils [None req-29b02f1d-c78a-4414-afbf-8114700422ba tempest-AttachInterfacesV270Test-1979904879 tempest-AttachInterfacesV270Test-1979904879-project-member] Lock "c5a731c8-f9a7-4a4f-a69a-a429d99b80fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.246s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.245580] env[62507]: INFO nova.compute.manager [-] [instance: 1b19cecd-2a04-4077-9758-9947a3bcb4c2] Took 0.04 seconds to deallocate network for instance. [ 783.262212] env[62507]: DEBUG nova.compute.manager [None req-9c627e6a-fc2b-4ca9-96f8-a542a24da03e tempest-ServersAdminNegativeTestJSON-2078858398 tempest-ServersAdminNegativeTestJSON-2078858398-project-member] [instance: 6316a65c-b75f-4432-9bd9-5e9833c18af3] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 783.325364] env[62507]: DEBUG nova.compute.manager [None req-9c627e6a-fc2b-4ca9-96f8-a542a24da03e tempest-ServersAdminNegativeTestJSON-2078858398 tempest-ServersAdminNegativeTestJSON-2078858398-project-member] [instance: 6316a65c-b75f-4432-9bd9-5e9833c18af3] Instance disappeared before build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 783.360679] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9c627e6a-fc2b-4ca9-96f8-a542a24da03e tempest-ServersAdminNegativeTestJSON-2078858398 tempest-ServersAdminNegativeTestJSON-2078858398-project-member] Lock "6316a65c-b75f-4432-9bd9-5e9833c18af3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.059s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.373405] env[62507]: DEBUG nova.compute.manager [None req-915c712c-0ccb-439f-9928-04fb85011147 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 6122b5aa-dde8-4999-b611-6695d01545c1] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 783.433201] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9fe059bf-bdf7-4ee2-a746-fc13895d7c80 tempest-InstanceActionsNegativeTestJSON-2012257067 tempest-InstanceActionsNegativeTestJSON-2012257067-project-member] Lock "1b19cecd-2a04-4077-9758-9947a3bcb4c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.289s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.438314] env[62507]: DEBUG nova.compute.manager [None req-915c712c-0ccb-439f-9928-04fb85011147 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 6122b5aa-dde8-4999-b611-6695d01545c1] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 783.461854] env[62507]: DEBUG oslo_concurrency.lockutils [None req-915c712c-0ccb-439f-9928-04fb85011147 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "6122b5aa-dde8-4999-b611-6695d01545c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.516s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.472880] env[62507]: DEBUG nova.compute.manager [None req-eb37c927-c963-4341-b648-ebd6e9eed15d tempest-ServerShowV247Test-1066966410 tempest-ServerShowV247Test-1066966410-project-member] [instance: b8a89a8f-9a7d-4d95-bf9b-7b1c6dea5600] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 783.498296] env[62507]: DEBUG nova.compute.manager [None req-eb37c927-c963-4341-b648-ebd6e9eed15d tempest-ServerShowV247Test-1066966410 tempest-ServerShowV247Test-1066966410-project-member] [instance: b8a89a8f-9a7d-4d95-bf9b-7b1c6dea5600] Instance disappeared before build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 783.526082] env[62507]: DEBUG oslo_concurrency.lockutils [None req-eb37c927-c963-4341-b648-ebd6e9eed15d tempest-ServerShowV247Test-1066966410 tempest-ServerShowV247Test-1066966410-project-member] Lock "b8a89a8f-9a7d-4d95-bf9b-7b1c6dea5600" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.307s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.539764] env[62507]: DEBUG nova.compute.manager [None req-7a578e35-1ea4-4aa5-a862-82b7099b1776 tempest-VolumesAssistedSnapshotsTest-1563220377 tempest-VolumesAssistedSnapshotsTest-1563220377-project-member] [instance: cb9d87f8-b8c4-4dbf-93ed-dab6988fae48] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 783.571184] env[62507]: DEBUG nova.compute.manager [None req-7a578e35-1ea4-4aa5-a862-82b7099b1776 tempest-VolumesAssistedSnapshotsTest-1563220377 tempest-VolumesAssistedSnapshotsTest-1563220377-project-member] [instance: cb9d87f8-b8c4-4dbf-93ed-dab6988fae48] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 783.606258] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7a578e35-1ea4-4aa5-a862-82b7099b1776 tempest-VolumesAssistedSnapshotsTest-1563220377 tempest-VolumesAssistedSnapshotsTest-1563220377-project-member] Lock "cb9d87f8-b8c4-4dbf-93ed-dab6988fae48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.669s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.619485] env[62507]: DEBUG nova.compute.manager [None req-a9d801b9-e687-4dfa-aaa5-389d85004d93 tempest-ServersWithSpecificFlavorTestJSON-413214265 tempest-ServersWithSpecificFlavorTestJSON-413214265-project-member] [instance: 765af4c8-ddae-4d08-a49f-217e038e5555] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 783.650455] env[62507]: DEBUG nova.compute.manager [None req-a9d801b9-e687-4dfa-aaa5-389d85004d93 tempest-ServersWithSpecificFlavorTestJSON-413214265 tempest-ServersWithSpecificFlavorTestJSON-413214265-project-member] [instance: 765af4c8-ddae-4d08-a49f-217e038e5555] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 783.683998] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a9d801b9-e687-4dfa-aaa5-389d85004d93 tempest-ServersWithSpecificFlavorTestJSON-413214265 tempest-ServersWithSpecificFlavorTestJSON-413214265-project-member] Lock "765af4c8-ddae-4d08-a49f-217e038e5555" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.466s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.697020] env[62507]: DEBUG nova.compute.manager [None req-dc2a5387-743c-4241-9cf1-98b629bb01ed tempest-ServerShowV247Test-1066966410 tempest-ServerShowV247Test-1066966410-project-member] [instance: 01bfc053-50f3-4813-8e4b-aceba0d2440f] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 783.729315] env[62507]: DEBUG nova.compute.manager [None req-dc2a5387-743c-4241-9cf1-98b629bb01ed tempest-ServerShowV247Test-1066966410 tempest-ServerShowV247Test-1066966410-project-member] [instance: 01bfc053-50f3-4813-8e4b-aceba0d2440f] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 783.773977] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dc2a5387-743c-4241-9cf1-98b629bb01ed tempest-ServerShowV247Test-1066966410 tempest-ServerShowV247Test-1066966410-project-member] Lock "01bfc053-50f3-4813-8e4b-aceba0d2440f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 201.342s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.787354] env[62507]: DEBUG nova.compute.manager [None req-4848aef1-1e45-44a2-b6e4-07e2dad477fe tempest-ServersAdmin275Test-176356115 tempest-ServersAdmin275Test-176356115-project-member] [instance: fbfd64cf-fa8e-48c0-9410-e3da6080f163] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 784.887898] env[62507]: DEBUG nova.compute.manager [None req-4848aef1-1e45-44a2-b6e4-07e2dad477fe tempest-ServersAdmin275Test-176356115 tempest-ServersAdmin275Test-176356115-project-member] [instance: fbfd64cf-fa8e-48c0-9410-e3da6080f163] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 784.922603] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4848aef1-1e45-44a2-b6e4-07e2dad477fe tempest-ServersAdmin275Test-176356115 tempest-ServersAdmin275Test-176356115-project-member] Lock "fbfd64cf-fa8e-48c0-9410-e3da6080f163" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 200.985s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.942163] env[62507]: DEBUG nova.compute.manager [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Starting instance...
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 785.019211] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.019395] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.021369] env[62507]: INFO nova.compute.claims [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 785.357775] env[62507]: DEBUG oslo_concurrency.lockutils [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Acquiring lock "d86cd3ea-23d1-488e-acc6-bb4b4b666247" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.462777] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2c43db-329b-4330-97de-137dacbc0294 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.470473] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9391c2e3-3734-4a78-9816-0eb57685090d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.503676] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e910677-580d-4d8f-a24a-2bd9dd968bf7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.512215] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9011f0e-ebb4-4eaf-80ff-14a62d2ef91f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.527699] env[62507]: DEBUG nova.compute.provider_tree [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.536341] env[62507]: DEBUG nova.scheduler.client.report [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 785.553030] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.533s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.554025] env[62507]: DEBUG nova.compute.manager [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 785.591715] env[62507]: DEBUG nova.compute.claims [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 785.591715] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.591715] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.977751] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ca4cfb-ba5a-4ac4-8c0b-4cb8e064e139 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.987172] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243c9a86-9eaa-4d08-9872-737a50c4a576 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.020828] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8335574e-93af-47df-af6a-c7df81996222 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.029802] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcfe3bc5-3d87-4190-b09b-ed00c3472df9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.047062] env[62507]: DEBUG nova.compute.provider_tree [None 
req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.069979] env[62507]: DEBUG nova.scheduler.client.report [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 786.096867] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.506s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.096958] env[62507]: DEBUG nova.compute.utils [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Conflict updating instance d86cd3ea-23d1-488e-acc6-bb4b4b666247. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 786.098903] env[62507]: DEBUG nova.compute.manager [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Instance disappeared during build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}} [ 786.099103] env[62507]: DEBUG nova.compute.manager [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 786.099329] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Acquiring lock "refresh_cache-d86cd3ea-23d1-488e-acc6-bb4b4b666247" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.099470] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Acquired lock "refresh_cache-d86cd3ea-23d1-488e-acc6-bb4b4b666247" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.100064] env[62507]: DEBUG nova.network.neutron [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 786.139266] env[62507]: DEBUG nova.network.neutron [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 786.411839] env[62507]: DEBUG nova.network.neutron [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.420879] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Releasing lock "refresh_cache-d86cd3ea-23d1-488e-acc6-bb4b4b666247" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.421617] env[62507]: DEBUG nova.compute.manager [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 786.421951] env[62507]: DEBUG nova.compute.manager [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 786.422311] env[62507]: DEBUG nova.network.neutron [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 786.449449] env[62507]: DEBUG nova.network.neutron [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 786.460412] env[62507]: DEBUG nova.network.neutron [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.470465] env[62507]: INFO nova.compute.manager [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Took 0.05 seconds to deallocate network for instance. 
[ 786.604385] env[62507]: INFO nova.scheduler.client.report [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Deleted allocations for instance d86cd3ea-23d1-488e-acc6-bb4b4b666247 [ 786.604780] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e505ffe7-1f9a-448f-aa0f-1b07608893d5 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Lock "d86cd3ea-23d1-488e-acc6-bb4b4b666247" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 199.545s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.605974] env[62507]: DEBUG oslo_concurrency.lockutils [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Lock "d86cd3ea-23d1-488e-acc6-bb4b4b666247" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 1.248s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.606220] env[62507]: DEBUG oslo_concurrency.lockutils [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Acquiring lock "d86cd3ea-23d1-488e-acc6-bb4b4b666247-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.606420] env[62507]: DEBUG oslo_concurrency.lockutils [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Lock "d86cd3ea-23d1-488e-acc6-bb4b4b666247-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.606586] env[62507]: DEBUG oslo_concurrency.lockutils [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Lock "d86cd3ea-23d1-488e-acc6-bb4b4b666247-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.611743] env[62507]: INFO nova.compute.manager [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Terminating instance [ 786.613814] env[62507]: DEBUG oslo_concurrency.lockutils [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Acquiring lock "refresh_cache-d86cd3ea-23d1-488e-acc6-bb4b4b666247" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.613814] env[62507]: DEBUG oslo_concurrency.lockutils [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Acquired lock "refresh_cache-d86cd3ea-23d1-488e-acc6-bb4b4b666247"
{{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.614923] env[62507]: DEBUG nova.network.neutron [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 786.621746] env[62507]: DEBUG nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 786.676358] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.676615] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.678111] env[62507]: INFO nova.compute.claims [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 786.686402] env[62507]: DEBUG nova.network.neutron [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 787.012328] env[62507]: DEBUG nova.network.neutron [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.023678] env[62507]: DEBUG oslo_concurrency.lockutils [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Releasing lock "refresh_cache-d86cd3ea-23d1-488e-acc6-bb4b4b666247" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.024105] env[62507]: DEBUG nova.compute.manager [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Start destroying the instance on the hypervisor. 
{{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 787.024304] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 787.024892] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45c4783e-10be-4c23-986d-dc3ae1fdf1b1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.035437] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6145c934-ad38-4c07-9ec2-6a22bc4e9099 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.071057] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d86cd3ea-23d1-488e-acc6-bb4b4b666247 could not be found. [ 787.071281] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 787.071462] env[62507]: INFO nova.compute.manager [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Took 0.05 seconds to destroy the instance on the hypervisor. [ 787.072014] env[62507]: DEBUG oslo.service.loopingcall [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 787.074499] env[62507]: DEBUG nova.compute.manager [-] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 787.074727] env[62507]: DEBUG nova.network.neutron [-] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 787.140033] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccf1ddc-2149-4211-b266-85076fbad593 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.149435] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb66da13-68f0-4781-a0d1-b99888535c80 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.183081] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd3ec31-677b-4bac-9c78-b0479dcb3161 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.190874] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464ce2d9-efaf-4d7b-aa11-ca098e89dd67 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.205657] env[62507]: DEBUG nova.compute.provider_tree [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.214691] env[62507]: DEBUG nova.scheduler.client.report [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 787.233320] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.557s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.233835] env[62507]: DEBUG nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Start building networks asynchronously for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 787.277868] env[62507]: DEBUG nova.compute.utils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 787.279181] env[62507]: DEBUG nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 787.279326] env[62507]: DEBUG nova.network.neutron [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 787.281884] env[62507]: DEBUG nova.network.neutron [-] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 787.292128] env[62507]: DEBUG nova.network.neutron [-] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.299264] env[62507]: DEBUG nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 787.303325] env[62507]: INFO nova.compute.manager [-] [instance: d86cd3ea-23d1-488e-acc6-bb4b4b666247] Took 0.23 seconds to deallocate network for instance. [ 787.397160] env[62507]: DEBUG nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 787.432614] env[62507]: DEBUG nova.virt.hardware [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=<?>,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-02-12T01:14:38Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 787.432850] env[62507]: DEBUG nova.virt.hardware [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 787.433282] env[62507]: DEBUG nova.virt.hardware [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.433282] env[62507]: DEBUG nova.virt.hardware [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 787.433380] env[62507]: DEBUG nova.virt.hardware [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 787.433526] env[62507]: DEBUG nova.virt.hardware [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 787.433669] env[62507]: DEBUG nova.virt.hardware [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 787.433829] env[62507]: DEBUG nova.virt.hardware [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 787.433999] env[62507]: DEBUG nova.virt.hardware [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a
tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 787.434179] env[62507]: DEBUG nova.virt.hardware [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 787.434389] env[62507]: DEBUG nova.virt.hardware [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 787.438279] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f1e24f-78b1-46c7-bbb8-5cfd7e7fd30f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.439072] env[62507]: DEBUG oslo_concurrency.lockutils [None req-39e7c059-eb95-4c17-83b2-61c0ff104e56 tempest-ImagesOneServerTestJSON-396612049 tempest-ImagesOneServerTestJSON-396612049-project-member] Lock "d86cd3ea-23d1-488e-acc6-bb4b4b666247" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.833s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.446782] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4eb965-aa13-45ac-8b80-db651610220f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.471016] env[62507]: DEBUG nova.policy [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf4be2d2579541f4876adae9676798c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e8011a5f4c4b47aaa106db1b61c449ba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 787.694861] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquiring lock "4a3639c7-8795-4702-a729-8239b0d55d51" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.950341] env[62507]: DEBUG nova.network.neutron [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Successfully created port: 0daa011f-53db-4cf1-b496-c6d7b6f44701 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 791.844611] env[62507]: DEBUG nova.network.neutron [None
req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Successfully updated port: 0daa011f-53db-4cf1-b496-c6d7b6f44701 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 791.859099] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquiring lock "refresh_cache-4a3639c7-8795-4702-a729-8239b0d55d51" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.859225] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquired lock "refresh_cache-4a3639c7-8795-4702-a729-8239b0d55d51" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.859416] env[62507]: DEBUG nova.network.neutron [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 791.957697] env[62507]: DEBUG nova.network.neutron [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 792.324224] env[62507]: DEBUG nova.compute.manager [req-55169f1b-c866-481c-a8a9-a76d9a28b37e req-e0dcd2e8-56bc-4dc8-8d88-6cda0c21a7ef service nova] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Received event network-vif-plugged-0daa011f-53db-4cf1-b496-c6d7b6f44701 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 792.324224] env[62507]: DEBUG oslo_concurrency.lockutils [req-55169f1b-c866-481c-a8a9-a76d9a28b37e req-e0dcd2e8-56bc-4dc8-8d88-6cda0c21a7ef service nova] Acquiring lock "4a3639c7-8795-4702-a729-8239b0d55d51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.324224] env[62507]: DEBUG oslo_concurrency.lockutils [req-55169f1b-c866-481c-a8a9-a76d9a28b37e req-e0dcd2e8-56bc-4dc8-8d88-6cda0c21a7ef service nova] Lock "4a3639c7-8795-4702-a729-8239b0d55d51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.324224] env[62507]: DEBUG oslo_concurrency.lockutils [req-55169f1b-c866-481c-a8a9-a76d9a28b37e req-e0dcd2e8-56bc-4dc8-8d88-6cda0c21a7ef service nova] Lock "4a3639c7-8795-4702-a729-8239b0d55d51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.324390] env[62507]: DEBUG nova.compute.manager [req-55169f1b-c866-481c-a8a9-a76d9a28b37e req-e0dcd2e8-56bc-4dc8-8d88-6cda0c21a7ef service nova] [instance:
4a3639c7-8795-4702-a729-8239b0d55d51] No waiting events found dispatching network-vif-plugged-0daa011f-53db-4cf1-b496-c6d7b6f44701 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 792.324390] env[62507]: WARNING nova.compute.manager [req-55169f1b-c866-481c-a8a9-a76d9a28b37e req-e0dcd2e8-56bc-4dc8-8d88-6cda0c21a7ef service nova] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Received unexpected event network-vif-plugged-0daa011f-53db-4cf1-b496-c6d7b6f44701 for instance with vm_state building and task_state deleting. [ 792.362921] env[62507]: DEBUG nova.network.neutron [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Updating instance_info_cache with network_info: [{"id": "0daa011f-53db-4cf1-b496-c6d7b6f44701", "address": "fa:16:3e:87:0b:59", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.208", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0daa011f-53", "ovs_interfaceid": "0daa011f-53db-4cf1-b496-c6d7b6f44701", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.387277] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Releasing lock "refresh_cache-4a3639c7-8795-4702-a729-8239b0d55d51" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.387277] env[62507]: DEBUG nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Instance network_info: |[{"id": "0daa011f-53db-4cf1-b496-c6d7b6f44701", "address": "fa:16:3e:87:0b:59", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.208", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": 
"nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0daa011f-53", "ovs_interfaceid": "0daa011f-53db-4cf1-b496-c6d7b6f44701", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 792.387409] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:0b:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0daa011f-53db-4cf1-b496-c6d7b6f44701', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 792.396444] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Creating folder: Project (e8011a5f4c4b47aaa106db1b61c449ba). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 792.397679] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-012a18b5-a52d-43de-84b8-cded4ddb94ff {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.408345] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Created folder: Project (e8011a5f4c4b47aaa106db1b61c449ba) in parent group-v497991. [ 792.408592] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Creating folder: Instances. Parent ref: group-v498036. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 792.409237] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45ab6998-8efa-4a88-b5cb-224fd17b6b2f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.417303] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Created folder: Instances in parent group-v498036. [ 792.417629] env[62507]: DEBUG oslo.service.loopingcall [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 792.418036] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 792.418233] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0df444ad-ebe3-44be-a105-48c9a4b387a8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.438934] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 792.438934] env[62507]: value = "task-2459959" [ 792.438934] env[62507]: _type = "Task" [ 792.438934] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.447904] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459959, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.954013] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459959, 'name': CreateVM_Task, 'duration_secs': 0.362027} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.954261] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 792.954940] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.955152] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.955474] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 792.955757] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec71d661-5f79-41ee-9eaa-3fb07d3a9be4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.962239] env[62507]: DEBUG oslo_vmware.api [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Waiting for the task: (returnval){ [ 792.962239] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5264b549-12f6-1aea-dd69-a16ef446997c" [ 792.962239] env[62507]: _type = "Task" [ 792.962239] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.973105] env[62507]: DEBUG oslo_vmware.api [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5264b549-12f6-1aea-dd69-a16ef446997c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.474867] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.475724] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 793.475999] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.170231] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.170231] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Cleaning up deleted instances {{(pid=62507) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 795.185324] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] There are 0 instances to clean {{(pid=62507) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 795.185552] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.185687] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Cleaning up deleted instances with incomplete migration {{(pid=62507) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 795.202544] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.595019] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 
tempest-ServerGroupTestJSON-185694910-project-member] Acquiring lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.595267] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.025205] env[62507]: DEBUG nova.compute.manager [req-8b9a56d4-8855-411c-92f1-45f02be8086a req-3e89f32f-f255-44f2-b891-c6017c617388 service nova] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Received event network-changed-0daa011f-53db-4cf1-b496-c6d7b6f44701 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 796.025205] env[62507]: DEBUG nova.compute.manager [req-8b9a56d4-8855-411c-92f1-45f02be8086a req-3e89f32f-f255-44f2-b891-c6017c617388 service nova] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Refreshing instance network info cache due to event network-changed-0daa011f-53db-4cf1-b496-c6d7b6f44701. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 796.025654] env[62507]: DEBUG oslo_concurrency.lockutils [req-8b9a56d4-8855-411c-92f1-45f02be8086a req-3e89f32f-f255-44f2-b891-c6017c617388 service nova] Acquiring lock "refresh_cache-4a3639c7-8795-4702-a729-8239b0d55d51" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.025654] env[62507]: DEBUG oslo_concurrency.lockutils [req-8b9a56d4-8855-411c-92f1-45f02be8086a req-3e89f32f-f255-44f2-b891-c6017c617388 service nova] Acquired lock "refresh_cache-4a3639c7-8795-4702-a729-8239b0d55d51" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.025654] env[62507]: DEBUG nova.network.neutron [req-8b9a56d4-8855-411c-92f1-45f02be8086a req-3e89f32f-f255-44f2-b891-c6017c617388 service nova] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Refreshing network info cache for port 0daa011f-53db-4cf1-b496-c6d7b6f44701 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 796.211152] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 796.360587] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c5e0a62f-8755-4709-9ddb-da7421e3e9fe tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "35a9caf1-5cb9-4d34-81ed-e064cfc73456" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.360725] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c5e0a62f-8755-4709-9ddb-da7421e3e9fe tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "35a9caf1-5cb9-4d34-81ed-e064cfc73456" acquired by
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.832216] env[62507]: DEBUG nova.network.neutron [req-8b9a56d4-8855-411c-92f1-45f02be8086a req-3e89f32f-f255-44f2-b891-c6017c617388 service nova] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Updated VIF entry in instance network info cache for port 0daa011f-53db-4cf1-b496-c6d7b6f44701. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 796.832629] env[62507]: DEBUG nova.network.neutron [req-8b9a56d4-8855-411c-92f1-45f02be8086a req-3e89f32f-f255-44f2-b891-c6017c617388 service nova] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Updating instance_info_cache with network_info: [{"id": "0daa011f-53db-4cf1-b496-c6d7b6f44701", "address": "fa:16:3e:87:0b:59", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.208", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0daa011f-53", "ovs_interfaceid": "0daa011f-53db-4cf1-b496-c6d7b6f44701", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.856118] env[62507]: DEBUG oslo_concurrency.lockutils [req-8b9a56d4-8855-411c-92f1-45f02be8086a req-3e89f32f-f255-44f2-b891-c6017c617388 service nova] Releasing lock "refresh_cache-4a3639c7-8795-4702-a729-8239b0d55d51" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.167549] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.052023] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee0d6ce3-8177-419f-b03e-3833dacab755 tempest-ServerRescueNegativeTestJSON-1387522986 tempest-ServerRescueNegativeTestJSON-1387522986-project-member] Acquiring lock "140c1da2-016b-45da-8134-90e1d51b81e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.052023] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee0d6ce3-8177-419f-b03e-3833dacab755 tempest-ServerRescueNegativeTestJSON-1387522986 tempest-ServerRescueNegativeTestJSON-1387522986-project-member] Lock "140c1da2-016b-45da-8134-90e1d51b81e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
:: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.163299] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.167665] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.169884] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.562529] env[62507]: DEBUG oslo_concurrency.lockutils [None req-464e09f8-b0df-4663-9c3e-41f2fde84eb1 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "f75f4ffa-8494-4edf-803f-9fe61b4899b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.562849] env[62507]: DEBUG oslo_concurrency.lockutils [None req-464e09f8-b0df-4663-9c3e-41f2fde84eb1 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "f75f4ffa-8494-4edf-803f-9fe61b4899b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.918016] env[62507]: DEBUG oslo_concurrency.lockutils [None req-105792b1-c35c-49c4-8b6d-f0b7c2f2e029 tempest-ServerActionsTestOtherB-1636251776 tempest-ServerActionsTestOtherB-1636251776-project-member] Acquiring lock "c07010ad-0831-4b46-80ca-4532eb3dac7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.918244] env[62507]: DEBUG oslo_concurrency.lockutils [None req-105792b1-c35c-49c4-8b6d-f0b7c2f2e029 tempest-ServerActionsTestOtherB-1636251776 tempest-ServerActionsTestOtherB-1636251776-project-member] Lock "c07010ad-0831-4b46-80ca-4532eb3dac7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.167737] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.168023] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 800.168225] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances 
to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 800.202133] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 800.202338] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 800.202551] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 800.202730] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 800.205021] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 800.205021] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 800.205021] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 800.205021] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 800.205021] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 800.205297] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 800.205297] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 800.205297] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.205297] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.205297] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 800.492489] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b1634efe-e42d-4af9-abe3-ab5fb821e8ef tempest-ServerRescueNegativeTestJSON-1387522986 tempest-ServerRescueNegativeTestJSON-1387522986-project-member] Acquiring lock "92a87da8-3ed1-4d74-9a6e-abb35d69d9ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.492753] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b1634efe-e42d-4af9-abe3-ab5fb821e8ef tempest-ServerRescueNegativeTestJSON-1387522986 tempest-ServerRescueNegativeTestJSON-1387522986-project-member] Lock "92a87da8-3ed1-4d74-9a6e-abb35d69d9ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.168645] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.182958] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.183265] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.183976] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.184262] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 801.186028] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6f22d6-797c-4d5b-a9c1-2fef6a24c647 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.198017] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f29790-0ef4-4a85-af88-0c8be1464983 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.211785] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b33d0c-25f7-4dab-b9cd-72f4aaaf32d4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.219290] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc9cb69-faac-4a11-80c6-757313dfe77c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.251061] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181182MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 801.251283] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.251532] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.436725] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c31b20a2-11aa-4d64-a8c6-2d8f889f1560 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.436904] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.437051] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.437186] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance dc241495-c9b7-4f2f-895d-e25008cc738a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.437310] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 63f63029-d01a-4d55-9753-95b93b7155cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.437431] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1dcce6af-d9f7-4a24-97c7-4b0425c39d68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.437550] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.437666] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 498b6bd7-03d8-44e7-b007-27d86afcb028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.437784] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.438085] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4a3639c7-8795-4702-a729-8239b0d55d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.452310] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ba7b892b-a955-419f-b46e-e9631150a264 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.469749] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a1380ba5-64df-4b21-a80b-96c6d9d80f73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.488031] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f8dcc4b2-c1f5-42e5-be12-77647f526cb1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.503803] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e682e67f-5a36-4851-b870-7099d7db119d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.521204] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a9b1ef96-1409-4700-a1bb-4aec1691a0fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.535157] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 26bd2ef7-5d8b-4924-9bc6-2dfc7c59e667 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.549279] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01d865c8-ed85-45ec-aac6-bf923cd52dfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.560580] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.571548] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 35a9caf1-5cb9-4d34-81ed-e064cfc73456 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.585856] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 140c1da2-016b-45da-8134-90e1d51b81e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.598844] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f75f4ffa-8494-4edf-803f-9fe61b4899b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.610108] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c07010ad-0831-4b46-80ca-4532eb3dac7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.621328] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 92a87da8-3ed1-4d74-9a6e-abb35d69d9ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.621584] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 801.621863] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 801.641614] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing inventories for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 801.669247] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Updating ProviderTree inventory for provider 40e67440-0925-46e5-9b58-6e63187cdfab from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 801.669452] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Updating inventory in ProviderTree for provider 40e67440-0925-46e5-9b58-6e63187cdfab with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 801.685884] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing aggregate associations for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab, aggregates: None {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 801.708962] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing trait associations for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 802.096609] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444fd0e9-cb25-445d-ace3-51cefa88e455 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.104583] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7733a93c-a362-4a4a-be1a-fef53e4c213a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.135298] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c83eb8a-dfb1-42c6-9ed3-f3015884481b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.144525] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427b73b1-f162-4056-a41f-7b2b0483b654 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.159337] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.168749] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 802.198618] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 802.198618] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.944s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.217611] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8f10a1b7-343d-4deb-8359-71b91b4ae960 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] Acquiring lock "8e909a0f-02f7-405b-8a4a-bcf555db245d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.217845] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8f10a1b7-343d-4deb-8359-71b91b4ae960 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] Lock "8e909a0f-02f7-405b-8a4a-bcf555db245d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.213145] env[62507]: DEBUG oslo_concurrency.lockutils [None req-564b7422-5e0a-4690-9a9d-dd7010e3fed4 tempest-ServersAaction247Test-1837452585 tempest-ServersAaction247Test-1837452585-project-member] Acquiring lock "3ff410eb-b7f7-4735-b20f-b6f4a59bfc8c" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.213407] env[62507]: DEBUG oslo_concurrency.lockutils [None req-564b7422-5e0a-4690-9a9d-dd7010e3fed4 tempest-ServersAaction247Test-1837452585 tempest-ServersAaction247Test-1837452585-project-member] Lock "3ff410eb-b7f7-4735-b20f-b6f4a59bfc8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.460660] env[62507]: DEBUG oslo_concurrency.lockutils [None req-257ab6f6-ec26-40b3-827d-bd9f4eedcd99 tempest-ServersNegativeTestMultiTenantJSON-288137237 tempest-ServersNegativeTestMultiTenantJSON-288137237-project-member] Acquiring lock "31ed600c-d84c-4595-aceb-38f5d4e5aaff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.460958] env[62507]: DEBUG oslo_concurrency.lockutils [None req-257ab6f6-ec26-40b3-827d-bd9f4eedcd99 tempest-ServersNegativeTestMultiTenantJSON-288137237 tempest-ServersNegativeTestMultiTenantJSON-288137237-project-member] Lock "31ed600c-d84c-4595-aceb-38f5d4e5aaff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.240809] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98a06325-6868-4eb7-9e7d-4b618a321e21 tempest-AttachVolumeTestJSON-1962259627 tempest-AttachVolumeTestJSON-1962259627-project-member] Acquiring lock "36c7ffe9-6a5f-4758-b1e8-36c0330d9a23" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.241171] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98a06325-6868-4eb7-9e7d-4b618a321e21 tempest-AttachVolumeTestJSON-1962259627 tempest-AttachVolumeTestJSON-1962259627-project-member] Lock "36c7ffe9-6a5f-4758-b1e8-36c0330d9a23" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.855541] env[62507]: WARNING oslo_vmware.rw_handles [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 831.855541] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 831.855541] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 831.855541] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 831.855541] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 831.855541] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 831.855541] env[62507]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 318, in begin [ 831.855541] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 831.855541] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 831.855541] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 831.855541] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 831.855541] env[62507]: ERROR oslo_vmware.rw_handles [ 831.856333] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/90c341ba-36f6-4fc1-bc20-ba15f21a5131/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 831.858236] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 831.858544] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Copying Virtual Disk [datastore2] vmware_temp/90c341ba-36f6-4fc1-bc20-ba15f21a5131/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/90c341ba-36f6-4fc1-bc20-ba15f21a5131/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 831.858913] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55e1cf73-34cc-4496-9a94-94efaf03ca50 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.868543] env[62507]: DEBUG oslo_vmware.api [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Waiting for the task: (returnval){ [ 831.868543] env[62507]: value = "task-2459967" [ 831.868543] env[62507]: _type = "Task" [ 831.868543] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.879071] env[62507]: DEBUG oslo_vmware.api [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Task: {'id': task-2459967, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.379599] env[62507]: DEBUG oslo_vmware.exceptions [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 832.379749] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.380396] env[62507]: ERROR nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 832.380396] env[62507]: Faults: ['InvalidArgument'] [ 832.380396] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Traceback (most recent call last): [ 832.380396] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 832.380396] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] yield resources [ 832.380396] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 832.380396] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] self.driver.spawn(context, instance, image_meta, [ 832.380396] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 832.380396] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] self._vmops.spawn(context, instance, image_meta, injected_files, [ 832.380396] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 832.380396] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] self._fetch_image_if_missing(context, vi) [ 832.380396] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 832.380865] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] image_cache(vi, tmp_image_ds_loc) [ 832.380865] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 832.380865] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] vm_util.copy_virtual_disk( [ 832.380865] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 832.380865] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] session._wait_for_task(vmdk_copy_task) [ 832.380865] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 832.380865] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] return self.wait_for_task(task_ref) [ 832.380865] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 832.380865] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] return evt.wait() [ 832.380865] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 832.380865] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] result = hub.switch() [ 832.380865] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 832.380865] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] return self.greenlet.switch() [ 832.381327] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 832.381327] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] self.f(*self.args, **self.kw) [ 832.381327] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 832.381327] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] raise exceptions.translate_fault(task_info.error) [ 832.381327] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 832.381327] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Faults: ['InvalidArgument'] [ 832.381327] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] [ 832.381327] env[62507]: INFO nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Terminating instance [ 832.382434] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.382643] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.382979] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a310e373-ed63-4207-9dd7-570d0b95bcc2 {{(pid=62507) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.385659] env[62507]: DEBUG nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 832.385889] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 832.386703] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b7d2ca-7516-482e-80ad-c03f6dfb9fdb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.393589] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 832.393853] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-015bdc6f-27b6-4272-9b94-1a32aa619938 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.396219] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.396439] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 832.397479] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4671138-4563-4dfa-ab14-c5efe78b67b9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.402328] env[62507]: DEBUG oslo_vmware.api [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Waiting for the task: (returnval){ [ 832.402328] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f836a9-0407-f292-2581-c80278000848" [ 832.402328] env[62507]: _type = "Task" [ 832.402328] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.416110] env[62507]: DEBUG oslo_vmware.api [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f836a9-0407-f292-2581-c80278000848, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.462065] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 832.462586] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 832.462586] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Deleting the datastore file [datastore2] c31b20a2-11aa-4d64-a8c6-2d8f889f1560 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 832.462708] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79346750-96b6-4ab2-858b-b5dab2d7c26e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.468573] env[62507]: DEBUG oslo_vmware.api [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Waiting for the task: (returnval){ [ 832.468573] env[62507]: value = "task-2459969" [ 832.468573] env[62507]: _type = "Task" [ 832.468573] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.476827] env[62507]: DEBUG oslo_vmware.api [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Task: {'id': task-2459969, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.913646] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 832.913892] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Creating directory with path [datastore2] vmware_temp/53ddbf7f-d3e3-41c0-8b88-a2753bcf8f0d/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.914134] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6f56ecb-99ea-414d-a4bf-138d33f119a1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.925366] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Created directory with path [datastore2] vmware_temp/53ddbf7f-d3e3-41c0-8b88-a2753bcf8f0d/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.925543] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Fetch image to [datastore2] vmware_temp/53ddbf7f-d3e3-41c0-8b88-a2753bcf8f0d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 832.925714] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/53ddbf7f-d3e3-41c0-8b88-a2753bcf8f0d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 832.926409] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7718065b-eb37-4e08-8196-234873979111 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.932621] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664f9686-f9df-48a1-9806-5e10b6fa2879 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.941516] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859e442a-8847-45df-a659-d848acd3d546 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.973950] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b72b2d-43ca-4603-9895-485de57ce7c0 {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.980346] env[62507]: DEBUG oslo_vmware.api [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Task: {'id': task-2459969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068256} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.981676] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 832.981863] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 832.982045] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 832.982225] env[62507]: INFO nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Took 0.60 seconds to destroy the instance on the hypervisor. 
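The destroy sequence above is the standard oslo.vmware invoke/poll pattern: Nova's ds_util.file_delete starts FileManager.DeleteDatastoreFile_Task through the session, then blocks in wait_for_task(), which produces the "Waiting for the task"/"progress is 0%" records until the task finishes (here after 0.068s). A minimal standalone sketch of that pattern, with a placeholder vCenter endpoint, credentials and datacenter reference (only the datastore path is taken from the log):

    from oslo_vmware import api

    # Placeholder endpoint and credentials; in the log this session was
    # created once at driver startup.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)

    dc_ref = None  # a real call needs the Datacenter managed-object ref

    # Start FileManager.DeleteDatastoreFile_Task, then poll it to
    # completion; wait_for_task() raises a translated exception
    # (e.g. VimFaultException) if the task ends in error.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] c31b20a2-11aa-4d64-a8c6-2d8f889f1560',
        datacenter=dc_ref)
    session.wait_for_task(task)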
[ 832.983945] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e03cba25-2898-4256-a940-8ca9f1278c16 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.985740] env[62507]: DEBUG nova.compute.claims [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 832.985935] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.986187] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.006906] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 833.062860] env[62507]: DEBUG oslo_vmware.rw_handles [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/53ddbf7f-d3e3-41c0-8b88-a2753bcf8f0d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 833.121893] env[62507]: DEBUG oslo_vmware.rw_handles [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 833.122094] env[62507]: DEBUG oslo_vmware.rw_handles [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/53ddbf7f-d3e3-41c0-8b88-a2753bcf8f0d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 833.359890] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9349d0d8-79f3-42ec-a4ff-8b7cdb79e761 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 833.367533] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52239f8f-77f0-44da-8968-5f4a991c12c1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 833.397612] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5990985-42e4-45bc-9cc5-dc364764a6a7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 833.404539] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d3206f-98ec-4c2c-abeb-860eb8260320 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 833.420214] env[62507]: DEBUG nova.compute.provider_tree [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 833.428844] env[62507]: DEBUG nova.scheduler.client.report [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 833.442990] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.457s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 833.443542] env[62507]: ERROR nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 833.443542] env[62507]: Faults: ['InvalidArgument']
[ 833.443542] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Traceback (most recent call last):
[ 833.443542] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 833.443542] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] self.driver.spawn(context, instance, image_meta,
[ 833.443542] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 833.443542] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 833.443542] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 833.443542] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] self._fetch_image_if_missing(context, vi)
[ 833.443542] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 833.443542] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] image_cache(vi, tmp_image_ds_loc)
[ 833.443542] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 833.443887] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] vm_util.copy_virtual_disk(
[ 833.443887] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 833.443887] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] session._wait_for_task(vmdk_copy_task)
[ 833.443887] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 833.443887] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] return self.wait_for_task(task_ref)
[ 833.443887] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 833.443887] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] return evt.wait()
[ 833.443887] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 833.443887] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] result = hub.switch()
[ 833.443887] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 833.443887] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] return self.greenlet.switch()
[ 833.443887] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 833.443887] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] self.f(*self.args, **self.kw)
[ 833.444258] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 833.444258] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] raise exceptions.translate_fault(task_info.error)
[ 833.444258] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 833.444258] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Faults: ['InvalidArgument']
[ 833.444258] env[62507]: ERROR nova.compute.manager [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560]
[ 833.444258] env[62507]: DEBUG nova.compute.utils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 833.445625] env[62507]: DEBUG nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Build of instance c31b20a2-11aa-4d64-a8c6-2d8f889f1560 was re-scheduled: A specified parameter was not correct: fileType
[ 833.445625] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 833.446024] env[62507]: DEBUG nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 833.446221] env[62507]: DEBUG nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged.
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 833.446401] env[62507]: DEBUG nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 833.446565] env[62507]: DEBUG nova.network.neutron [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 833.845878] env[62507]: DEBUG nova.network.neutron [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.856472] env[62507]: INFO nova.compute.manager [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Took 0.41 seconds to deallocate network for instance. [ 833.972793] env[62507]: INFO nova.scheduler.client.report [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Deleted allocations for instance c31b20a2-11aa-4d64-a8c6-2d8f889f1560 [ 833.992216] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fad7908a-b6c1-4372-a5f2-476744f1f7b2 tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Lock "c31b20a2-11aa-4d64-a8c6-2d8f889f1560" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 283.622s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.993336] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5e679be2-87b8-4b04-bc1e-49810950b44b tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Lock "c31b20a2-11aa-4d64-a8c6-2d8f889f1560" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 85.426s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.993563] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5e679be2-87b8-4b04-bc1e-49810950b44b tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Acquiring lock "c31b20a2-11aa-4d64-a8c6-2d8f889f1560-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.995621] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5e679be2-87b8-4b04-bc1e-49810950b44b tempest-ImagesOneServerNegativeTestJSON-1675882151 
tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Lock "c31b20a2-11aa-4d64-a8c6-2d8f889f1560-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.995621] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5e679be2-87b8-4b04-bc1e-49810950b44b tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Lock "c31b20a2-11aa-4d64-a8c6-2d8f889f1560-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.996126] env[62507]: INFO nova.compute.manager [None req-5e679be2-87b8-4b04-bc1e-49810950b44b tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Terminating instance [ 834.005130] env[62507]: DEBUG nova.compute.manager [None req-5e679be2-87b8-4b04-bc1e-49810950b44b tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 834.005130] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5e679be2-87b8-4b04-bc1e-49810950b44b tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 834.005130] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-393557e0-7fd0-483c-a2c0-6998a25b1a29 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.007433] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e9dfab-ebdf-4167-b984-76dae6a6c22d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.018248] env[62507]: DEBUG nova.compute.manager [None req-d1b461bd-95cc-4456-8da9-c6a08f85f887 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] [instance: 1b8bc5c8-1f3a-409b-bdbc-bdad8e1966e2] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 834.041665] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-5e679be2-87b8-4b04-bc1e-49810950b44b tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c31b20a2-11aa-4d64-a8c6-2d8f889f1560 could not be found. 
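The timings in the lock records above show the per-instance serialization at work: _locked_do_build_and_run_instance held the lock "c31b20a2-11aa-4d64-a8c6-2d8f889f1560" for 283.622s (the failed build plus the re-schedule bookkeeping), so do_terminate_instance had to wait 85.426s on the same lock before the delete could begin. A minimal sketch of that oslo.concurrency pattern (the function body is a placeholder, not Nova's actual terminate path):

    from oslo_concurrency import lockutils

    # Both the build path and the terminate path wrap their critical
    # sections in a lock keyed on the instance UUID; whoever arrives
    # second blocks, and the "waited N s" figure in the log is exactly
    # that blocking time.
    @lockutils.synchronized('c31b20a2-11aa-4d64-a8c6-2d8f889f1560')
    def do_terminate_instance():
        pass  # destroy on the hypervisor, deallocate network, ...

    do_terminate_instance()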
[ 834.041876] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5e679be2-87b8-4b04-bc1e-49810950b44b tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 834.042069] env[62507]: INFO nova.compute.manager [None req-5e679be2-87b8-4b04-bc1e-49810950b44b tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Took 0.04 seconds to destroy the instance on the hypervisor. [ 834.042332] env[62507]: DEBUG oslo.service.loopingcall [None req-5e679be2-87b8-4b04-bc1e-49810950b44b tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.042569] env[62507]: DEBUG nova.compute.manager [-] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 834.042669] env[62507]: DEBUG nova.network.neutron [-] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 834.045326] env[62507]: DEBUG nova.compute.manager [None req-d1b461bd-95cc-4456-8da9-c6a08f85f887 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] [instance: 1b8bc5c8-1f3a-409b-bdbc-bdad8e1966e2] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 834.064403] env[62507]: DEBUG nova.network.neutron [-] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.070425] env[62507]: DEBUG oslo_concurrency.lockutils [None req-d1b461bd-95cc-4456-8da9-c6a08f85f887 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] Lock "1b8bc5c8-1f3a-409b-bdbc-bdad8e1966e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.114s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.072567] env[62507]: INFO nova.compute.manager [-] [instance: c31b20a2-11aa-4d64-a8c6-2d8f889f1560] Took 0.03 seconds to deallocate network for instance. [ 834.079577] env[62507]: DEBUG nova.compute.manager [None req-71381752-6e58-4e46-b0b4-137662f3c991 tempest-ServerMetadataTestJSON-665996216 tempest-ServerMetadataTestJSON-665996216-project-member] [instance: 0eaa55ee-0619-456f-b35b-469c1ed7897d] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 834.102163] env[62507]: DEBUG nova.compute.manager [None req-71381752-6e58-4e46-b0b4-137662f3c991 tempest-ServerMetadataTestJSON-665996216 tempest-ServerMetadataTestJSON-665996216-project-member] [instance: 0eaa55ee-0619-456f-b35b-469c1ed7897d] Instance disappeared before build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 834.139732] env[62507]: DEBUG oslo_concurrency.lockutils [None req-71381752-6e58-4e46-b0b4-137662f3c991 tempest-ServerMetadataTestJSON-665996216 tempest-ServerMetadataTestJSON-665996216-project-member] Lock "0eaa55ee-0619-456f-b35b-469c1ed7897d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.501s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.151837] env[62507]: DEBUG nova.compute.manager [None req-818f08ba-daee-425d-ba60-97a50301c6ea tempest-FloatingIPsAssociationTestJSON-1307269260 tempest-FloatingIPsAssociationTestJSON-1307269260-project-member] [instance: bcc0fbd8-e554-488a-8a12-732d7db1a4b0] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 834.183576] env[62507]: DEBUG nova.compute.manager [None req-818f08ba-daee-425d-ba60-97a50301c6ea tempest-FloatingIPsAssociationTestJSON-1307269260 tempest-FloatingIPsAssociationTestJSON-1307269260-project-member] [instance: bcc0fbd8-e554-488a-8a12-732d7db1a4b0] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 834.192315] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5e679be2-87b8-4b04-bc1e-49810950b44b tempest-ImagesOneServerNegativeTestJSON-1675882151 tempest-ImagesOneServerNegativeTestJSON-1675882151-project-member] Lock "c31b20a2-11aa-4d64-a8c6-2d8f889f1560" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.199s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.204344] env[62507]: DEBUG oslo_concurrency.lockutils [None req-818f08ba-daee-425d-ba60-97a50301c6ea tempest-FloatingIPsAssociationTestJSON-1307269260 tempest-FloatingIPsAssociationTestJSON-1307269260-project-member] Lock "bcc0fbd8-e554-488a-8a12-732d7db1a4b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.800s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.211849] env[62507]: DEBUG nova.compute.manager [None req-a203c354-212c-469f-b8b8-abd83a09ada0 tempest-InstanceActionsV221TestJSON-258376973 tempest-InstanceActionsV221TestJSON-258376973-project-member] [instance: 6d751e02-64bb-41bb-9ded-30db9b885c2e] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 834.233523] env[62507]: DEBUG nova.compute.manager [None req-a203c354-212c-469f-b8b8-abd83a09ada0 tempest-InstanceActionsV221TestJSON-258376973 tempest-InstanceActionsV221TestJSON-258376973-project-member] [instance: 6d751e02-64bb-41bb-9ded-30db9b885c2e] Instance disappeared before build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 834.252740] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a203c354-212c-469f-b8b8-abd83a09ada0 tempest-InstanceActionsV221TestJSON-258376973 tempest-InstanceActionsV221TestJSON-258376973-project-member] Lock "6d751e02-64bb-41bb-9ded-30db9b885c2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.738s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.264166] env[62507]: DEBUG nova.compute.manager [None req-d8ba961c-4562-40e1-a234-9711d7dc8c66 tempest-ServerActionsV293TestJSON-328365227 tempest-ServerActionsV293TestJSON-328365227-project-member] [instance: ba7b892b-a955-419f-b46e-e9631150a264] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 834.288920] env[62507]: DEBUG nova.compute.manager [None req-d8ba961c-4562-40e1-a234-9711d7dc8c66 tempest-ServerActionsV293TestJSON-328365227 tempest-ServerActionsV293TestJSON-328365227-project-member] [instance: ba7b892b-a955-419f-b46e-e9631150a264] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 834.309666] env[62507]: DEBUG oslo_concurrency.lockutils [None req-d8ba961c-4562-40e1-a234-9711d7dc8c66 tempest-ServerActionsV293TestJSON-328365227 tempest-ServerActionsV293TestJSON-328365227-project-member] Lock "ba7b892b-a955-419f-b46e-e9631150a264" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.437s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.318849] env[62507]: DEBUG nova.compute.manager [None req-4174627b-465f-4638-9bb5-2df5203ae66d tempest-ServersTestManualDisk-2088113835 tempest-ServersTestManualDisk-2088113835-project-member] [instance: a1380ba5-64df-4b21-a80b-96c6d9d80f73] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 834.345750] env[62507]: DEBUG nova.compute.manager [None req-4174627b-465f-4638-9bb5-2df5203ae66d tempest-ServersTestManualDisk-2088113835 tempest-ServersTestManualDisk-2088113835-project-member] [instance: a1380ba5-64df-4b21-a80b-96c6d9d80f73] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 834.366549] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4174627b-465f-4638-9bb5-2df5203ae66d tempest-ServersTestManualDisk-2088113835 tempest-ServersTestManualDisk-2088113835-project-member] Lock "a1380ba5-64df-4b21-a80b-96c6d9d80f73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.665s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.376351] env[62507]: DEBUG nova.compute.manager [None req-efee469a-ca1a-4f27-95b8-964da8ac7e33 tempest-AttachVolumeTestJSON-1962259627 tempest-AttachVolumeTestJSON-1962259627-project-member] [instance: f8dcc4b2-c1f5-42e5-be12-77647f526cb1] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 834.400032] env[62507]: DEBUG nova.compute.manager [None req-efee469a-ca1a-4f27-95b8-964da8ac7e33 tempest-AttachVolumeTestJSON-1962259627 tempest-AttachVolumeTestJSON-1962259627-project-member] [instance: f8dcc4b2-c1f5-42e5-be12-77647f526cb1] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 834.422117] env[62507]: DEBUG oslo_concurrency.lockutils [None req-efee469a-ca1a-4f27-95b8-964da8ac7e33 tempest-AttachVolumeTestJSON-1962259627 tempest-AttachVolumeTestJSON-1962259627-project-member] Lock "f8dcc4b2-c1f5-42e5-be12-77647f526cb1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.282s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.432466] env[62507]: DEBUG nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 834.480521] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.480779] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.482228] env[62507]: INFO nova.compute.claims [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 834.800334] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d48998d-4fae-44c7-83e0-5244377a7f14 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.808107] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f1ff85-3c4c-4c8b-933f-8831258af38c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.838403] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5723055-06de-404c-9ad1-bf0648100af6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.845219] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8350ed65-24b6-4a7b-bff2-704bffd8c398 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
834.857989] env[62507]: DEBUG nova.compute.provider_tree [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.867195] env[62507]: DEBUG nova.scheduler.client.report [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 834.880843] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.400s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.881355] env[62507]: DEBUG nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 834.918438] env[62507]: DEBUG nova.compute.utils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 834.919893] env[62507]: DEBUG nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 834.920082] env[62507]: DEBUG nova.network.neutron [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 834.931017] env[62507]: DEBUG nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Start building block device mappings for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 834.997804] env[62507]: DEBUG nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 835.015530] env[62507]: DEBUG nova.policy [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bcb61a27c14a4be58a1b5ee74a01d29e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c1c58140e2b7497cad5e588753aa6748', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 835.029501] env[62507]: DEBUG nova.virt.hardware [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 835.029811] env[62507]: DEBUG nova.virt.hardware [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 835.030039] env[62507]: DEBUG nova.virt.hardware [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 835.030304] env[62507]: DEBUG nova.virt.hardware [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 835.030526] env[62507]: DEBUG nova.virt.hardware [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 835.030757] env[62507]: DEBUG 
nova.virt.hardware [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 835.031035] env[62507]: DEBUG nova.virt.hardware [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 835.031269] env[62507]: DEBUG nova.virt.hardware [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 835.031501] env[62507]: DEBUG nova.virt.hardware [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 835.031726] env[62507]: DEBUG nova.virt.hardware [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 835.031961] env[62507]: DEBUG nova.virt.hardware [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 835.032855] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9352b13d-5230-4c0b-ad3b-5fd5f1428a85 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.041580] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ac5267-c33a-429d-b60f-e9aca550e3a7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.685239] env[62507]: DEBUG nova.network.neutron [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Successfully created port: 676bd3e6-8149-47b3-b691-67fc7b9f47e9 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 836.956353] env[62507]: DEBUG nova.compute.manager [req-cac08382-0ebb-43f0-9c06-4fa7ecb1e8e3 req-999c94a8-4e00-49cb-8b46-a4fae36c7a9b service nova] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Received event network-vif-plugged-676bd3e6-8149-47b3-b691-67fc7b9f47e9 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 836.957280] env[62507]: DEBUG oslo_concurrency.lockutils [req-cac08382-0ebb-43f0-9c06-4fa7ecb1e8e3 
req-999c94a8-4e00-49cb-8b46-a4fae36c7a9b service nova] Acquiring lock "e682e67f-5a36-4851-b870-7099d7db119d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.957280] env[62507]: DEBUG oslo_concurrency.lockutils [req-cac08382-0ebb-43f0-9c06-4fa7ecb1e8e3 req-999c94a8-4e00-49cb-8b46-a4fae36c7a9b service nova] Lock "e682e67f-5a36-4851-b870-7099d7db119d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.957512] env[62507]: DEBUG oslo_concurrency.lockutils [req-cac08382-0ebb-43f0-9c06-4fa7ecb1e8e3 req-999c94a8-4e00-49cb-8b46-a4fae36c7a9b service nova] Lock "e682e67f-5a36-4851-b870-7099d7db119d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.957512] env[62507]: DEBUG nova.compute.manager [req-cac08382-0ebb-43f0-9c06-4fa7ecb1e8e3 req-999c94a8-4e00-49cb-8b46-a4fae36c7a9b service nova] [instance: e682e67f-5a36-4851-b870-7099d7db119d] No waiting events found dispatching network-vif-plugged-676bd3e6-8149-47b3-b691-67fc7b9f47e9 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 836.957726] env[62507]: WARNING nova.compute.manager [req-cac08382-0ebb-43f0-9c06-4fa7ecb1e8e3 req-999c94a8-4e00-49cb-8b46-a4fae36c7a9b service nova] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Received unexpected event network-vif-plugged-676bd3e6-8149-47b3-b691-67fc7b9f47e9 for instance with vm_state building and task_state spawning. 
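The network-vif-plugged records above arrive through Nova's external-events API: Neutron posts the event once the port is wired up, and because no waiter was registered yet for the spawning instance, Nova logs the "Received unexpected event" warning. A sketch of the kind of request Neutron's Nova notifier sends (endpoint and token are placeholders; the UUIDs are the ones from the log):

    import requests

    # POST to Nova's os-server-external-events API as a service user;
    # Nova matches the event to a registered waiter by server_uuid and
    # the port id in "tag", or warns if no waiter exists yet.
    resp = requests.post(
        'http://nova-api.example.test/v2.1/os-server-external-events',
        headers={'X-Auth-Token': 'SERVICE-TOKEN'},
        json={'events': [{
            'server_uuid': 'e682e67f-5a36-4851-b870-7099d7db119d',
            'name': 'network-vif-plugged',
            'tag': '676bd3e6-8149-47b3-b691-67fc7b9f47e9',
            'status': 'completed',
        }]})
    resp.raise_for_status()  # the response reports a code per event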
[ 837.071044] env[62507]: DEBUG nova.network.neutron [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Successfully updated port: 676bd3e6-8149-47b3-b691-67fc7b9f47e9 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.087770] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Acquiring lock "refresh_cache-e682e67f-5a36-4851-b870-7099d7db119d" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.087931] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Acquired lock "refresh_cache-e682e67f-5a36-4851-b870-7099d7db119d" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.088227] env[62507]: DEBUG nova.network.neutron [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 837.171347] env[62507]: DEBUG nova.network.neutron [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 837.404162] env[62507]: DEBUG nova.network.neutron [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Updating instance_info_cache with network_info: [{"id": "676bd3e6-8149-47b3-b691-67fc7b9f47e9", "address": "fa:16:3e:0e:f7:0b", "network": {"id": "a6e8cf0e-b7a5-45b3-93d4-30f4385c5f29", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-112638439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c1c58140e2b7497cad5e588753aa6748", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97d59ff5-df43-4e01-b11f-6c57b53fb79d", "external-id": "nsx-vlan-transportzone-106", "segmentation_id": 106, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap676bd3e6-81", "ovs_interfaceid": "676bd3e6-8149-47b3-b691-67fc7b9f47e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.423065] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Releasing lock "refresh_cache-e682e67f-5a36-4851-b870-7099d7db119d" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.423375] env[62507]: DEBUG nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Instance network_info: |[{"id": "676bd3e6-8149-47b3-b691-67fc7b9f47e9", "address": "fa:16:3e:0e:f7:0b", "network": {"id": "a6e8cf0e-b7a5-45b3-93d4-30f4385c5f29", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-112638439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c1c58140e2b7497cad5e588753aa6748", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97d59ff5-df43-4e01-b11f-6c57b53fb79d", "external-id": "nsx-vlan-transportzone-106", "segmentation_id": 106, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap676bd3e6-81", "ovs_interfaceid": "676bd3e6-8149-47b3-b691-67fc7b9f47e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 837.424218] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:f7:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '97d59ff5-df43-4e01-b11f-6c57b53fb79d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '676bd3e6-8149-47b3-b691-67fc7b9f47e9', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 837.432092] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Creating folder: Project (c1c58140e2b7497cad5e588753aa6748). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 837.432709] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ddf4ebf1-13b9-4e94-99e8-5f500ece7e76 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.458638] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Created folder: Project (c1c58140e2b7497cad5e588753aa6748) in parent group-v497991. [ 837.458850] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Creating folder: Instances. Parent ref: group-v498043. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 837.459108] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed78d3dd-d48c-456f-8fd5-6f1c59df2a0d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.468126] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Created folder: Instances in parent group-v498043. [ 837.468368] env[62507]: DEBUG oslo.service.loopingcall [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 837.468555] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 837.468755] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-100a0401-8cd3-43b6-9846-d608cd37fb9e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 837.487692] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 837.487692] env[62507]: value = "task-2459973"
[ 837.487692] env[62507]: _type = "Task"
[ 837.487692] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 837.495795] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459973, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 837.998222] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459973, 'name': CreateVM_Task, 'duration_secs': 0.345227} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 837.998556] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 837.999099] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 837.999272] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 837.999702] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 837.999952] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c62c835a-c531-4ff4-95b1-a330ad9ab14d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 838.005392] env[62507]: DEBUG oslo_vmware.api [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Waiting for the task: (returnval){
[ 838.005392] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52beb44d-4af6-ccf3-399f-0b970b3241c6"
[ 838.005392] env[62507]: _type = "Task"
[ 838.005392] env[62507]: } to complete.
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.013273] env[62507]: DEBUG oslo_vmware.api [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52beb44d-4af6-ccf3-399f-0b970b3241c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.517122] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.517436] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 838.517686] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.350106] env[62507]: DEBUG nova.compute.manager [req-96ed3e84-c379-4cab-a23e-f43a36015375 req-1ac1e8e0-37a3-4818-8378-b77c3af0f1a7 service nova] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Received event network-changed-676bd3e6-8149-47b3-b691-67fc7b9f47e9 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 839.350106] env[62507]: DEBUG nova.compute.manager [req-96ed3e84-c379-4cab-a23e-f43a36015375 req-1ac1e8e0-37a3-4818-8378-b77c3af0f1a7 service nova] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Refreshing instance network info cache due to event network-changed-676bd3e6-8149-47b3-b691-67fc7b9f47e9. 
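
[Annotation] The Acquiring/Acquired/Releasing records around "[datastore2] devstack-image-cache_base/601dc712-..." show oslo.concurrency's named-lock idiom: every request that may need the cached image serializes on a lock named after the cache path, so only the first request downloads the image and later ones find it already cached. Roughly (a sketch; image_exists and download_image are hypothetical stand-ins for the SearchDatastore_Task and HTTP fetch seen here):

    from oslo_concurrency import lockutils

    CACHE_LOCK = ("[datastore2] devstack-image-cache_base/"
                  "601dc712-1d53-404c-b128-df5971f300a1")

    def fetch_image_if_missing(image_exists, download_image):
        with lockutils.lock(CACHE_LOCK):  # same named lock as in the log
            if not image_exists():
                download_image()
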
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 839.350106] env[62507]: DEBUG oslo_concurrency.lockutils [req-96ed3e84-c379-4cab-a23e-f43a36015375 req-1ac1e8e0-37a3-4818-8378-b77c3af0f1a7 service nova] Acquiring lock "refresh_cache-e682e67f-5a36-4851-b870-7099d7db119d" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.352024] env[62507]: DEBUG oslo_concurrency.lockutils [req-96ed3e84-c379-4cab-a23e-f43a36015375 req-1ac1e8e0-37a3-4818-8378-b77c3af0f1a7 service nova] Acquired lock "refresh_cache-e682e67f-5a36-4851-b870-7099d7db119d" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.352024] env[62507]: DEBUG nova.network.neutron [req-96ed3e84-c379-4cab-a23e-f43a36015375 req-1ac1e8e0-37a3-4818-8378-b77c3af0f1a7 service nova] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Refreshing network info cache for port 676bd3e6-8149-47b3-b691-67fc7b9f47e9 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 839.725672] env[62507]: DEBUG nova.network.neutron [req-96ed3e84-c379-4cab-a23e-f43a36015375 req-1ac1e8e0-37a3-4818-8378-b77c3af0f1a7 service nova] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Updated VIF entry in instance network info cache for port 676bd3e6-8149-47b3-b691-67fc7b9f47e9. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 839.725672] env[62507]: DEBUG nova.network.neutron [req-96ed3e84-c379-4cab-a23e-f43a36015375 req-1ac1e8e0-37a3-4818-8378-b77c3af0f1a7 service nova] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Updating instance_info_cache with network_info: [{"id": "676bd3e6-8149-47b3-b691-67fc7b9f47e9", "address": "fa:16:3e:0e:f7:0b", "network": {"id": "a6e8cf0e-b7a5-45b3-93d4-30f4385c5f29", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-112638439-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c1c58140e2b7497cad5e588753aa6748", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97d59ff5-df43-4e01-b11f-6c57b53fb79d", "external-id": "nsx-vlan-transportzone-106", "segmentation_id": 106, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap676bd3e6-81", "ovs_interfaceid": "676bd3e6-8149-47b3-b691-67fc7b9f47e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.737785] env[62507]: DEBUG oslo_concurrency.lockutils [req-96ed3e84-c379-4cab-a23e-f43a36015375 req-1ac1e8e0-37a3-4818-8378-b77c3af0f1a7 service nova] Releasing lock "refresh_cache-e682e67f-5a36-4851-b870-7099d7db119d" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.759200] env[62507]: DEBUG oslo_concurrency.lockutils [None req-06e2d2ed-578c-4342-a9f7-752e3ca3bf06 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Acquiring 
lock "e682e67f-5a36-4851-b870-7099d7db119d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.119108] env[62507]: DEBUG oslo_concurrency.lockutils [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Acquiring lock "2fea54d3-0637-4811-9ff3-1a72bc4e08ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.119432] env[62507]: DEBUG oslo_concurrency.lockutils [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Lock "2fea54d3-0637-4811-9ff3-1a72bc4e08ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.457300] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquiring lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.457621] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.196584] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.220783] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 859.167462] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 859.167694] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.163157] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 861.168647] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 861.168953] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 861.168953] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 861.191308] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 861.191460] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 861.191595] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 861.191725] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 861.191852] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 861.191975] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 861.192151] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 861.192284] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Skipping network cache update for instance because it is Building. 
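
[Annotation] The run_periodic_tasks lines are oslo.service dispatching ComputeManager's periodic jobs; each job is a method registered with the periodic_task decorator on a PeriodicTasks subclass, and the service calls run_periodic_tasks(context) on an interval. A minimal sketch (the task body is a placeholder, not Nova's _heal_instance_info_cache):

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _heal_info_cache(self, context):
            # The real task walks the host's instances and, as the records
            # above show, skips any still in the Building state.
            pass
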
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 861.192405] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 861.192525] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 861.192645] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 861.193242] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 861.193344] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 861.193473] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 862.170218] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.170218] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.181624] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.181952] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.182032] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.182179] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None 
None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 862.183380] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047e62e7-2667-44cc-8a78-495ba142a1ef {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.192192] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc93601-54b4-4951-abe5-b008805b0545 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.207289] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a308da35-6e84-4b5b-82d7-094094dfec02 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.213587] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc5e20c-cddd-4822-b933-86a0f9642f04 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.242368] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181180MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 862.242510] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.242708] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.313249] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.313488] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.313566] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance dc241495-c9b7-4f2f-895d-e25008cc738a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.313652] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 63f63029-d01a-4d55-9753-95b93b7155cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.313771] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1dcce6af-d9f7-4a24-97c7-4b0425c39d68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.313889] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.314017] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 498b6bd7-03d8-44e7-b007-27d86afcb028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.314139] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.314255] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4a3639c7-8795-4702-a729-8239b0d55d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.314369] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e682e67f-5a36-4851-b870-7099d7db119d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.325881] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a9b1ef96-1409-4700-a1bb-4aec1691a0fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.337523] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 26bd2ef7-5d8b-4924-9bc6-2dfc7c59e667 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.348379] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01d865c8-ed85-45ec-aac6-bf923cd52dfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.358850] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.370263] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 35a9caf1-5cb9-4d34-81ed-e064cfc73456 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.380856] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 140c1da2-016b-45da-8134-90e1d51b81e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.391202] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f75f4ffa-8494-4edf-803f-9fe61b4899b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.400787] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c07010ad-0831-4b46-80ca-4532eb3dac7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.411813] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 92a87da8-3ed1-4d74-9a6e-abb35d69d9ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.421218] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 8e909a0f-02f7-405b-8a4a-bcf555db245d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.431574] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3ff410eb-b7f7-4735-b20f-b6f4a59bfc8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.441352] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 31ed600c-d84c-4595-aceb-38f5d4e5aaff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.451057] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 36c7ffe9-6a5f-4758-b1e8-36c0330d9a23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.462169] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2fea54d3-0637-4811-9ff3-1a72bc4e08ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.474339] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.474834] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 862.474996] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 862.750039] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a50836b-2a59-47a6-a22f-ba05a2d241e0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.757154] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d619dfab-1e3b-4966-97bc-f5f322666da3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.786374] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95de0d80-4d66-4a22-b444-30608f24ade0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.793412] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c610011-1b75-4bad-a2de-05547614213d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.806017] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.814064] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 862.831026] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 862.831217] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.588s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.873421] env[62507]: WARNING oslo_vmware.rw_handles [None 
req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 881.873421] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 881.873421] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 881.873421] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 881.873421] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 881.873421] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 881.873421] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 881.873421] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 881.873421] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 881.873421] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 881.873421] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 881.873421] env[62507]: ERROR oslo_vmware.rw_handles [ 881.874090] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/53ddbf7f-d3e3-41c0-8b88-a2753bcf8f0d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 881.875728] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 881.875978] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Copying Virtual Disk [datastore2] vmware_temp/53ddbf7f-d3e3-41c0-8b88-a2753bcf8f0d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/53ddbf7f-d3e3-41c0-8b88-a2753bcf8f0d/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 881.876280] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e590f00-979d-407a-a015-5805153193de {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.885199] env[62507]: DEBUG oslo_vmware.api [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Waiting for the task: (returnval){ [ 881.885199] env[62507]: value = "task-2459976" [ 881.885199] env[62507]: _type = "Task" [ 881.885199] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.893595] env[62507]: DEBUG oslo_vmware.api [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Task: {'id': task-2459976, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.395598] env[62507]: DEBUG oslo_vmware.exceptions [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 882.395810] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.396399] env[62507]: ERROR nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 882.396399] env[62507]: Faults: ['InvalidArgument'] [ 882.396399] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Traceback (most recent call last): [ 882.396399] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 882.396399] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] yield resources [ 882.396399] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 882.396399] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] self.driver.spawn(context, instance, image_meta, [ 882.396399] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 882.396399] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 882.396399] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 882.396399] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] self._fetch_image_if_missing(context, vi) [ 882.396399] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 882.396833] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] image_cache(vi, tmp_image_ds_loc) [ 882.396833] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 882.396833] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] vm_util.copy_virtual_disk( [ 882.396833] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 882.396833] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] session._wait_for_task(vmdk_copy_task) [ 882.396833] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 882.396833] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] return self.wait_for_task(task_ref) [ 882.396833] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 882.396833] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] return evt.wait() [ 882.396833] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 882.396833] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] result = hub.switch() [ 882.396833] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 882.396833] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] return self.greenlet.switch() [ 882.397283] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 882.397283] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] self.f(*self.args, **self.kw) [ 882.397283] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 882.397283] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] raise exceptions.translate_fault(task_info.error) [ 882.397283] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 882.397283] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Faults: ['InvalidArgument'] [ 882.397283] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] [ 882.397283] env[62507]: INFO nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Terminating instance [ 882.398273] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.398481] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.398708] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87f27868-43db-4915-a514-2fcdfa46543c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.400764] env[62507]: DEBUG nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 882.400945] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 882.401649] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6730975-a253-4d5c-8959-5cbd0b99a7ce {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.408111] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 882.408314] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53a47ad6-2c53-44c6-a139-83f506b7595a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.410303] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.410478] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Folder [datastore2] devstack-image-cache_base created. 
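
[Annotation] The MakeDirectory call followed by "Folder [datastore2] devstack-image-cache_base created" is an idempotent create-if-missing: ds_util.mkdir treats an already-existing directory as success, so concurrent requests can race on the cache path safely. Sketched generically (assuming oslo.vmware's FileAlreadyExistsException fault class; session_mkdir is a hypothetical wrapper around the FileManager.MakeDirectory invocation above):

    from oslo_vmware import exceptions as vexc

    def mkdir_idempotent(session_mkdir, path):
        try:
            session_mkdir(path)
        except vexc.FileAlreadyExistsException:
            pass  # another request created it first; treat as success
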
{{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 882.411367] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d0ff2dc-6d24-4b74-813a-7d0d06804362 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.415746] env[62507]: DEBUG oslo_vmware.api [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Waiting for the task: (returnval){ [ 882.415746] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]520dd424-dc3a-f125-819c-0895481158d5" [ 882.415746] env[62507]: _type = "Task" [ 882.415746] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.422579] env[62507]: DEBUG oslo_vmware.api [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]520dd424-dc3a-f125-819c-0895481158d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.484567] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 882.484950] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 882.485342] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Deleting the datastore file [datastore2] 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 882.485649] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f9189b0-30f8-4515-80f3-3470770f70bb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.492012] env[62507]: DEBUG oslo_vmware.api [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Waiting for the task: (returnval){ [ 882.492012] env[62507]: value = "task-2459978" [ 882.492012] env[62507]: _type = "Task" [ 882.492012] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.501063] env[62507]: DEBUG oslo_vmware.api [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Task: {'id': task-2459978, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.926293] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 882.926589] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Creating directory with path [datastore2] vmware_temp/9c764444-bfd1-4eb0-babb-bcbd6fbf2cb2/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.926790] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5af26c0-380f-4034-bc8c-bd781236d78e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.939097] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Created directory with path [datastore2] vmware_temp/9c764444-bfd1-4eb0-babb-bcbd6fbf2cb2/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.939301] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Fetch image to [datastore2] vmware_temp/9c764444-bfd1-4eb0-babb-bcbd6fbf2cb2/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 882.939479] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/9c764444-bfd1-4eb0-babb-bcbd6fbf2cb2/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 882.940328] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42334ae-faca-4107-a818-b5905b535f4b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.947074] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a680b43-15fc-4146-be84-78e2a25b086d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.956047] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1618fb-6c1f-4ab0-8f78-9c2c60754380 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.986581] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb33dad-d619-4b7e-8a29-42f8c5c5cb58 {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.995304] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ffda0e10-c2f0-466d-9de4-b17fd73d261b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.001661] env[62507]: DEBUG oslo_vmware.api [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Task: {'id': task-2459978, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07057} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.001894] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 883.002092] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 883.002270] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 883.002446] env[62507]: INFO nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Took 0.60 seconds to destroy the instance on the hypervisor. 
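
[Annotation] With the instance destroyed, the records that follow abort its resource claim under the "compute_resources" lock so the tracker and placement stop accounting for it. Nova's claims behave like context managers that roll back on failure; schematically (an illustrative shape, not the real ResourceTracker API):

    import threading

    _resources_lock = threading.Lock()  # stands in for the "compute_resources" lock

    class Claim:
        def __init__(self, tracker, resources):
            self.tracker, self.resources = tracker, resources

        def abort(self):
            # Mirrors the "Aborting claim" / abort_instance_claim records:
            # return the claimed resources under the tracker-wide lock.
            with _resources_lock:
                self.tracker.free(self.resources)

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc, tb):
            if exc_type is not None:  # spawn failed: give the resources back
                self.abort()
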
[ 883.007010] env[62507]: DEBUG nova.compute.claims [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 883.007095] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.007320] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.015492] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 883.068340] env[62507]: DEBUG oslo_vmware.rw_handles [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9c764444-bfd1-4eb0-babb-bcbd6fbf2cb2/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 883.128660] env[62507]: DEBUG oslo_vmware.rw_handles [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 883.128844] env[62507]: DEBUG oslo_vmware.rw_handles [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9c764444-bfd1-4eb0-babb-bcbd6fbf2cb2/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
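
[Annotation] The rw_handles records stream the 21,318,656-byte sparse VMDK straight to the ESX host's /folder endpoint over HTTPS, authenticated by the generic service ticket acquired just before. The same transfer can be sketched with requests (an approximation of what the write handle does; the vmware_cgi_ticket cookie name is an assumption about oslo.vmware's ticket handling):

    import requests

    URL = ("https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/"
           "9c764444-bfd1-4eb0-babb-bcbd6fbf2cb2/601dc712-1d53-404c-b128-df5971f300a1/"
           "tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2")

    def upload(vmdk_path, ticket):
        # Stream a local sparse VMDK to the datastore folder endpoint.
        with open(vmdk_path, "rb") as src:
            resp = requests.put(
                URL,
                data=src,  # requests streams file objects without buffering
                headers={"Cookie": f"vmware_cgi_ticket={ticket}"},  # assumed name
                verify=False,  # lab endpoint; verify TLS in real deployments
            )
        resp.raise_for_status()
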
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 883.385831] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f622c7-eaa0-49e8-a769-3df6c1adae53 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.393315] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d42b412-dae8-468b-a541-b6123daadbe8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.422998] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b632c3a-7483-4f6a-9d62-8d369813c72f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.430116] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ae131f-823a-4f29-859a-fdc2e9a08163 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.442713] env[62507]: DEBUG nova.compute.provider_tree [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 883.451550] env[62507]: DEBUG nova.scheduler.client.report [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 883.464865] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.457s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.465426] env[62507]: ERROR nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 883.465426] env[62507]: Faults: ['InvalidArgument'] [ 883.465426] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Traceback (most recent call last): [ 883.465426] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 883.465426] env[62507]: ERROR nova.compute.manager [instance: 
3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] self.driver.spawn(context, instance, image_meta, [ 883.465426] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 883.465426] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 883.465426] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 883.465426] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] self._fetch_image_if_missing(context, vi) [ 883.465426] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 883.465426] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] image_cache(vi, tmp_image_ds_loc) [ 883.465426] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 883.465855] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] vm_util.copy_virtual_disk( [ 883.465855] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 883.465855] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] session._wait_for_task(vmdk_copy_task) [ 883.465855] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 883.465855] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] return self.wait_for_task(task_ref) [ 883.465855] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 883.465855] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] return evt.wait() [ 883.465855] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 883.465855] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] result = hub.switch() [ 883.465855] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 883.465855] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] return self.greenlet.switch() [ 883.465855] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 883.465855] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] self.f(*self.args, **self.kw) [ 883.466301] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 883.466301] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] raise exceptions.translate_fault(task_info.error) [ 883.466301] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 883.466301] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Faults: ['InvalidArgument'] [ 883.466301] env[62507]: ERROR nova.compute.manager [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] [ 883.466301] env[62507]: DEBUG nova.compute.utils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 883.467423] env[62507]: DEBUG nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Build of instance 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a was re-scheduled: A specified parameter was not correct: fileType [ 883.467423] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 883.467791] env[62507]: DEBUG nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 883.467966] env[62507]: DEBUG nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 883.468148] env[62507]: DEBUG nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 883.468330] env[62507]: DEBUG nova.network.neutron [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 883.795535] env[62507]: DEBUG nova.network.neutron [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.807330] env[62507]: INFO nova.compute.manager [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Took 0.34 seconds to deallocate network for instance. [ 883.920066] env[62507]: INFO nova.scheduler.client.report [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Deleted allocations for instance 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a [ 883.941395] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b0cca76b-e892-498d-8203-b9398cf21cd7 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Lock "3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 332.061s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.942487] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9ab61149-9074-46ee-b16f-b5f0d046f5b0 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Lock "3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 135.092s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.942699] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9ab61149-9074-46ee-b16f-b5f0d046f5b0 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Acquiring lock "3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.942911] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9ab61149-9074-46ee-b16f-b5f0d046f5b0 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Lock "3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.944034] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9ab61149-9074-46ee-b16f-b5f0d046f5b0 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Lock "3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.945046] env[62507]: INFO nova.compute.manager [None req-9ab61149-9074-46ee-b16f-b5f0d046f5b0 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Terminating instance [ 883.946713] env[62507]: DEBUG nova.compute.manager [None req-9ab61149-9074-46ee-b16f-b5f0d046f5b0 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 883.946900] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab61149-9074-46ee-b16f-b5f0d046f5b0 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 883.947365] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a114b635-5541-4acf-8773-59c5be4d6a34 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.952536] env[62507]: DEBUG nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 883.958818] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a8630f-cf5b-474f-b9b8-f3b8230fb97f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.988057] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-9ab61149-9074-46ee-b16f-b5f0d046f5b0 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a could not be found. [ 883.988430] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-9ab61149-9074-46ee-b16f-b5f0d046f5b0 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 883.988706] env[62507]: INFO nova.compute.manager [None req-9ab61149-9074-46ee-b16f-b5f0d046f5b0 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 883.989057] env[62507]: DEBUG oslo.service.loopingcall [None req-9ab61149-9074-46ee-b16f-b5f0d046f5b0 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.989417] env[62507]: DEBUG nova.compute.manager [-] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 883.989615] env[62507]: DEBUG nova.network.neutron [-] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 884.021774] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.022035] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.023518] env[62507]: INFO nova.compute.claims [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 884.026782] env[62507]: DEBUG nova.network.neutron [-] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.035200] env[62507]: INFO nova.compute.manager [-] [instance: 3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a] Took 0.05 seconds to deallocate network for instance. 
[ 884.138732] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9ab61149-9074-46ee-b16f-b5f0d046f5b0 tempest-ServerRescueTestJSON-647360806 tempest-ServerRescueTestJSON-647360806-project-member] Lock "3d51c0e1-242e-4cdb-bf8c-4e7aa4d7656a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.196s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.371373] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50eb1905-d03d-44d0-838b-9e134100e413 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.380180] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e79b1a2-5d70-4eb2-a958-7422883311e4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.409881] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3170e90f-c0cd-4814-976d-e898beee02eb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.417172] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0869bc2e-c25e-4bfa-9e70-067f55bd299f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.430248] env[62507]: DEBUG nova.compute.provider_tree [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.440125] env[62507]: DEBUG nova.scheduler.client.report [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 884.453312] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.431s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.453756] env[62507]: DEBUG nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Start building networks asynchronously for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 884.486050] env[62507]: DEBUG nova.compute.utils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 884.487234] env[62507]: DEBUG nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 884.487437] env[62507]: DEBUG nova.network.neutron [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 884.495606] env[62507]: DEBUG nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 884.550244] env[62507]: DEBUG nova.policy [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e01bb85ce15d4ae9a5545be49c51c366', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '556bcbce3c914145a5cb64cad7d3166b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 884.556621] env[62507]: DEBUG nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 884.582908] env[62507]: DEBUG nova.virt.hardware [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 884.583173] env[62507]: DEBUG nova.virt.hardware [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 884.583334] env[62507]: DEBUG nova.virt.hardware [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 884.583516] env[62507]: DEBUG nova.virt.hardware [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 884.583662] env[62507]: DEBUG nova.virt.hardware [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 884.583810] env[62507]: DEBUG nova.virt.hardware [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 884.584127] env[62507]: DEBUG nova.virt.hardware [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 884.584325] env[62507]: DEBUG nova.virt.hardware [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 884.584504] env[62507]: DEBUG 
nova.virt.hardware [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 884.584669] env[62507]: DEBUG nova.virt.hardware [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 884.584843] env[62507]: DEBUG nova.virt.hardware [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 884.585734] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4067f3-c902-4dd7-9ccc-97ade9679491 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.595189] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993bc9b3-324c-459e-8c23-effdf681c1c1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.963139] env[62507]: DEBUG nova.network.neutron [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Successfully created port: a39a0794-11f6-47a1-9a3e-dce5f3d78082 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 886.073452] env[62507]: DEBUG nova.network.neutron [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Successfully updated port: a39a0794-11f6-47a1-9a3e-dce5f3d78082 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 886.091116] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Acquiring lock "refresh_cache-a9b1ef96-1409-4700-a1bb-4aec1691a0fd" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.091214] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Acquired lock "refresh_cache-a9b1ef96-1409-4700-a1bb-4aec1691a0fd" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.091447] env[62507]: DEBUG nova.network.neutron [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 886.184805] env[62507]: DEBUG nova.network.neutron [None 
req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 886.189329] env[62507]: DEBUG nova.compute.manager [req-fc1d6fda-e2f5-4558-9569-a568939e4c0d req-98b05f6b-1796-4d6d-8be6-24f78be4fc72 service nova] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Received event network-vif-plugged-a39a0794-11f6-47a1-9a3e-dce5f3d78082 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 886.189579] env[62507]: DEBUG oslo_concurrency.lockutils [req-fc1d6fda-e2f5-4558-9569-a568939e4c0d req-98b05f6b-1796-4d6d-8be6-24f78be4fc72 service nova] Acquiring lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.189831] env[62507]: DEBUG oslo_concurrency.lockutils [req-fc1d6fda-e2f5-4558-9569-a568939e4c0d req-98b05f6b-1796-4d6d-8be6-24f78be4fc72 service nova] Lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.190055] env[62507]: DEBUG oslo_concurrency.lockutils [req-fc1d6fda-e2f5-4558-9569-a568939e4c0d req-98b05f6b-1796-4d6d-8be6-24f78be4fc72 service nova] Lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.190441] env[62507]: DEBUG nova.compute.manager [req-fc1d6fda-e2f5-4558-9569-a568939e4c0d req-98b05f6b-1796-4d6d-8be6-24f78be4fc72 service nova] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] No waiting events found dispatching network-vif-plugged-a39a0794-11f6-47a1-9a3e-dce5f3d78082 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 886.190441] env[62507]: WARNING nova.compute.manager [req-fc1d6fda-e2f5-4558-9569-a568939e4c0d req-98b05f6b-1796-4d6d-8be6-24f78be4fc72 service nova] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Received unexpected event network-vif-plugged-a39a0794-11f6-47a1-9a3e-dce5f3d78082 for instance with vm_state building and task_state spawning. 
[ 886.555096] env[62507]: DEBUG nova.network.neutron [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Updating instance_info_cache with network_info: [{"id": "a39a0794-11f6-47a1-9a3e-dce5f3d78082", "address": "fa:16:3e:cb:02:de", "network": {"id": "621369f4-bdfd-4601-885d-add486152d77", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-119688579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "556bcbce3c914145a5cb64cad7d3166b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa39a0794-11", "ovs_interfaceid": "a39a0794-11f6-47a1-9a3e-dce5f3d78082", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.576660] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Releasing lock "refresh_cache-a9b1ef96-1409-4700-a1bb-4aec1691a0fd" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.576660] env[62507]: DEBUG nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Instance network_info: |[{"id": "a39a0794-11f6-47a1-9a3e-dce5f3d78082", "address": "fa:16:3e:cb:02:de", "network": {"id": "621369f4-bdfd-4601-885d-add486152d77", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-119688579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "556bcbce3c914145a5cb64cad7d3166b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa39a0794-11", "ovs_interfaceid": "a39a0794-11f6-47a1-9a3e-dce5f3d78082", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 886.576956] env[62507]: DEBUG 
nova.virt.vmwareapi.vmops [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:02:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a39a0794-11f6-47a1-9a3e-dce5f3d78082', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 886.585182] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Creating folder: Project (556bcbce3c914145a5cb64cad7d3166b). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 886.585805] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6410287d-4a0a-4a78-a810-9e0670a57451 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.599492] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Created folder: Project (556bcbce3c914145a5cb64cad7d3166b) in parent group-v497991. [ 886.602016] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Creating folder: Instances. Parent ref: group-v498046. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 886.602016] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b95354be-873b-4c87-98ff-7b9d4472f842 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.608940] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Created folder: Instances in parent group-v498046. [ 886.609184] env[62507]: DEBUG oslo.service.loopingcall [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.609361] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 886.609553] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-211414ce-c6f7-4ef8-b862-dbe2a2c6945f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.630320] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 886.630320] env[62507]: value = "task-2459981" [ 886.630320] env[62507]: _type = "Task" [ 886.630320] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.639829] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459981, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.140257] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459981, 'name': CreateVM_Task, 'duration_secs': 0.295537} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.140491] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 887.141219] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.141385] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.141725] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 887.141971] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f5d9fe5-af9d-4e01-9298-12cf7a172ac9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.146927] env[62507]: DEBUG oslo_vmware.api [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Waiting for the task: (returnval){ [ 887.146927] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52bccc4a-582e-d0de-344d-91fa5eb8f952" [ 887.146927] env[62507]: _type = "Task" [ 887.146927] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.157018] env[62507]: DEBUG oslo_vmware.api [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52bccc4a-582e-d0de-344d-91fa5eb8f952, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.657470] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.657752] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 887.657979] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.364075] env[62507]: DEBUG nova.compute.manager [req-867764f7-2c0d-4b99-9913-257366c25d21 req-2782a7e3-e1a3-4952-adc6-fd9dac47d314 service nova] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Received event network-changed-a39a0794-11f6-47a1-9a3e-dce5f3d78082 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 888.364328] env[62507]: DEBUG nova.compute.manager [req-867764f7-2c0d-4b99-9913-257366c25d21 req-2782a7e3-e1a3-4952-adc6-fd9dac47d314 service nova] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Refreshing instance network info cache due to event network-changed-a39a0794-11f6-47a1-9a3e-dce5f3d78082. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 888.364503] env[62507]: DEBUG oslo_concurrency.lockutils [req-867764f7-2c0d-4b99-9913-257366c25d21 req-2782a7e3-e1a3-4952-adc6-fd9dac47d314 service nova] Acquiring lock "refresh_cache-a9b1ef96-1409-4700-a1bb-4aec1691a0fd" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.364671] env[62507]: DEBUG oslo_concurrency.lockutils [req-867764f7-2c0d-4b99-9913-257366c25d21 req-2782a7e3-e1a3-4952-adc6-fd9dac47d314 service nova] Acquired lock "refresh_cache-a9b1ef96-1409-4700-a1bb-4aec1691a0fd" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.364833] env[62507]: DEBUG nova.network.neutron [req-867764f7-2c0d-4b99-9913-257366c25d21 req-2782a7e3-e1a3-4952-adc6-fd9dac47d314 service nova] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Refreshing network info cache for port a39a0794-11f6-47a1-9a3e-dce5f3d78082 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 889.001740] env[62507]: DEBUG nova.network.neutron [req-867764f7-2c0d-4b99-9913-257366c25d21 req-2782a7e3-e1a3-4952-adc6-fd9dac47d314 service nova] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Updated VIF entry in instance network info cache for port a39a0794-11f6-47a1-9a3e-dce5f3d78082. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 889.002105] env[62507]: DEBUG nova.network.neutron [req-867764f7-2c0d-4b99-9913-257366c25d21 req-2782a7e3-e1a3-4952-adc6-fd9dac47d314 service nova] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Updating instance_info_cache with network_info: [{"id": "a39a0794-11f6-47a1-9a3e-dce5f3d78082", "address": "fa:16:3e:cb:02:de", "network": {"id": "621369f4-bdfd-4601-885d-add486152d77", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-119688579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "556bcbce3c914145a5cb64cad7d3166b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa39a0794-11", "ovs_interfaceid": "a39a0794-11f6-47a1-9a3e-dce5f3d78082", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.017372] env[62507]: DEBUG oslo_concurrency.lockutils [req-867764f7-2c0d-4b99-9913-257366c25d21 req-2782a7e3-e1a3-4952-adc6-fd9dac47d314 service nova] Releasing lock "refresh_cache-a9b1ef96-1409-4700-a1bb-4aec1691a0fd" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.230582] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "b53bed7e-5e76-4aa5-abe2-b05750497404" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.230912] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "b53bed7e-5e76-4aa5-abe2-b05750497404" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.649142] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5a5d21f1-511d-47ba-80be-9b32d07de63c tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Acquiring lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.830867] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.168840] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.169112] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.164022] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.168040] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.168040] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 922.168040] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 922.189642] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 922.189741] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 922.189880] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 922.190015] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 922.190147] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 922.190271] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 922.190391] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 922.190512] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 922.190630] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 922.190749] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 922.190867] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 922.191379] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.191536] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
[ 923.168011] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 924.167504] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 924.167752] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 924.179664] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 924.179911] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 924.180075] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 924.180237] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 924.181391] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703d7b8f-b3ed-4f07-b0d3-a14995b246bb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.190537] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88f813f-4418-4a27-9433-8de74668cd27 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.204487] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b2f02d-5093-4a23-80fd-2f75d8e81424 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.210982] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6019c01-3de2-4f55-8395-2657c499877a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.241048] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181172MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 924.241205] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 924.241393] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 924.310324] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 924.310492] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance dc241495-c9b7-4f2f-895d-e25008cc738a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 924.310627] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 63f63029-d01a-4d55-9753-95b93b7155cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 924.310753] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1dcce6af-d9f7-4a24-97c7-4b0425c39d68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 924.310875] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 924.310997] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 498b6bd7-03d8-44e7-b007-27d86afcb028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 924.311124] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 924.311242] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4a3639c7-8795-4702-a729-8239b0d55d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 924.311360] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e682e67f-5a36-4851-b870-7099d7db119d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 924.311475] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a9b1ef96-1409-4700-a1bb-4aec1691a0fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 924.322439] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 26bd2ef7-5d8b-4924-9bc6-2dfc7c59e667 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.332923] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01d865c8-ed85-45ec-aac6-bf923cd52dfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.342883] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.352866] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 35a9caf1-5cb9-4d34-81ed-e064cfc73456 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.362677] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 140c1da2-016b-45da-8134-90e1d51b81e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.372526] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f75f4ffa-8494-4edf-803f-9fe61b4899b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.382590] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c07010ad-0831-4b46-80ca-4532eb3dac7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.392337] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 92a87da8-3ed1-4d74-9a6e-abb35d69d9ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.401855] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 8e909a0f-02f7-405b-8a4a-bcf555db245d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.412565] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3ff410eb-b7f7-4735-b20f-b6f4a59bfc8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.422306] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 31ed600c-d84c-4595-aceb-38f5d4e5aaff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.432386] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 36c7ffe9-6a5f-4758-b1e8-36c0330d9a23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.443938] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2fea54d3-0637-4811-9ff3-1a72bc4e08ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.452925] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.481382] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b53bed7e-5e76-4aa5-abe2-b05750497404 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 924.481647] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 924.481868] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 924.759251] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e831cf-8821-4881-8509-44f2bf7c995e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.766928] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df84383-a02d-45de-bbea-ef1398b23dc2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.798044] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3211ce-957d-4ef0-9239-2a6c25e19d78 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.804208] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5bb475-352f-4c3b-92b6-ecbb8503e211 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.817086] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 924.825382] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 924.839773] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 924.839963] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.599s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 931.891788] env[62507]: WARNING oslo_vmware.rw_handles [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
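The inventory data logged by the report client above is what placement uses to bound scheduling: effective capacity per resource class is (total - reserved) * allocation_ratio. Recomputing it from the logged values (illustrative arithmetic only):

    # Values copied from the inventory line above; the formula is
    # placement's effective-capacity rule.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0 -- hence the 10
    # allocated vcpus against 48 physical leave ample schedulable headroom.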
[ 931.891788] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 931.891788] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 931.891788] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 931.891788] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 931.891788] env[62507]: ERROR oslo_vmware.rw_handles response.begin()
[ 931.891788] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 931.891788] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 931.891788] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 931.891788] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 931.891788] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 931.891788] env[62507]: ERROR oslo_vmware.rw_handles
[ 931.892436] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/9c764444-bfd1-4eb0-babb-bcbd6fbf2cb2/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 931.894588] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 931.894898] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Copying Virtual Disk [datastore2] vmware_temp/9c764444-bfd1-4eb0-babb-bcbd6fbf2cb2/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/9c764444-bfd1-4eb0-babb-bcbd6fbf2cb2/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 931.895301] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0df2fd6e-525f-4b2b-947e-94ad5312e68b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 931.903861] env[62507]: DEBUG oslo_vmware.api [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Waiting for the task: (returnval){
[ 931.903861] env[62507]: value = "task-2459982"
[ 931.903861] env[62507]: _type = "Task"
[ 931.903861] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 931.911816] env[62507]: DEBUG oslo_vmware.api [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Task: {'id': task-2459982, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 932.415428] env[62507]: DEBUG oslo_vmware.exceptions [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 932.415733] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 932.416313] env[62507]: ERROR nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 932.416313] env[62507]: Faults: ['InvalidArgument']
[ 932.416313] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Traceback (most recent call last):
[ 932.416313] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 932.416313] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] yield resources
[ 932.416313] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 932.416313] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] self.driver.spawn(context, instance, image_meta,
[ 932.416313] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 932.416313] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 932.416313] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 932.416313] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] self._fetch_image_if_missing(context, vi)
[ 932.416313] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 932.416638] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] image_cache(vi, tmp_image_ds_loc)
[ 932.416638] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 932.416638] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] vm_util.copy_virtual_disk(
[ 932.416638] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 932.416638] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] session._wait_for_task(vmdk_copy_task)
[ 932.416638] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 932.416638] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] return self.wait_for_task(task_ref)
[ 932.416638] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 932.416638] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] return evt.wait()
[ 932.416638] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 932.416638] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] result = hub.switch()
[ 932.416638] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 932.416638] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] return self.greenlet.switch()
[ 932.417177] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 932.417177] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] self.f(*self.args, **self.kw)
[ 932.417177] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 932.417177] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] raise exceptions.translate_fault(task_info.error)
[ 932.417177] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 932.417177] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Faults: ['InvalidArgument']
[ 932.417177] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3]
[ 932.417177] env[62507]: INFO nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Terminating instance
[ 932.418345] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 932.418553] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 932.419317] env[62507]: DEBUG nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 932.419393] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 932.419589] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1541f301-4a41-49f4-92d2-386350bf51aa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.422360] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1485a09d-c8e0-4988-b551-467b99ff15ce {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.429403] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 932.429662] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c895ba49-f64c-490d-accb-b6c79a152145 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.432169] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 932.432345] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 932.433426] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a83b049c-90fa-4cb5-95f7-53e31dcafd40 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.437993] env[62507]: DEBUG oslo_vmware.api [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Waiting for the task: (returnval){
[ 932.437993] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5269f686-32ea-d202-d304-e7322a8cc258"
[ 932.437993] env[62507]: _type = "Task"
[ 932.437993] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 932.446728] env[62507]: DEBUG oslo_vmware.api [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5269f686-32ea-d202-d304-e7322a8cc258, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 932.501433] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 932.501626] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 932.501787] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Deleting the datastore file [datastore2] 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 932.502064] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-157cd296-f9a6-4fe2-b47a-2d861bc4797a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.509356] env[62507]: DEBUG oslo_vmware.api [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Waiting for the task: (returnval){
[ 932.509356] env[62507]: value = "task-2459984"
[ 932.509356] env[62507]: _type = "Task"
[ 932.509356] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 932.517554] env[62507]: DEBUG oslo_vmware.api [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Task: {'id': task-2459984, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 932.948132] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 932.948419] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Creating directory with path [datastore2] vmware_temp/e6ce4ab3-1b48-4e59-a1b3-1d16e25856a9/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 932.948700] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a7c155d-2dfa-4e7d-af06-52ed84c33738 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.960660] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Created directory with path [datastore2] vmware_temp/e6ce4ab3-1b48-4e59-a1b3-1d16e25856a9/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 932.960905] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Fetch image to [datastore2] vmware_temp/e6ce4ab3-1b48-4e59-a1b3-1d16e25856a9/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 932.961105] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/e6ce4ab3-1b48-4e59-a1b3-1d16e25856a9/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 932.962616] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ae3f26-e07e-4189-b4c9-98959909c9c0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.969302] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8515ea56-47ac-4802-af68-8057cfec7aad {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.979233] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f3e0ac-1cf7-4bb1-934d-633d608db4f0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.014984] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3384538-4d83-4c22-83e0-b1f728c40651 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.022866] env[62507]: DEBUG oslo_vmware.api [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Task: {'id': task-2459984, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083439} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 933.024081] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 933.024190] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 933.024483] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 933.024547] env[62507]: INFO nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Took 0.61 seconds to destroy the instance on the hypervisor.
[ 933.026404] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-88965c2f-561b-4ee9-bad9-3f5db6d13591 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.028771] env[62507]: DEBUG nova.compute.claims [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 933.028946] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 933.029173] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 933.051012] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 933.117349] env[62507]: DEBUG oslo_vmware.rw_handles [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6ce4ab3-1b48-4e59-a1b3-1d16e25856a9/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 933.183440] env[62507]: DEBUG oslo_vmware.rw_handles [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 933.183440] env[62507]: DEBUG oslo_vmware.rw_handles [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6ce4ab3-1b48-4e59-a1b3-1d16e25856a9/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 933.449008] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc04aaee-2e6a-483a-a788-b126e623e647 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.456988] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11364967-5283-406a-9787-e0fc2503f930 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.487795] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d7de67-356a-4121-8e8c-fe08b173cbc2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.494854] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49839fc6-b576-44d4-b204-9b71fd81112e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.508086] env[62507]: DEBUG nova.compute.provider_tree [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 933.517014] env[62507]: DEBUG nova.scheduler.client.report [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 933.535993] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.507s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 933.536545] env[62507]: ERROR nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 933.536545] env[62507]: Faults: ['InvalidArgument']
[ 933.536545] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Traceback (most recent call last):
[ 933.536545] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 933.536545] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] self.driver.spawn(context, instance, image_meta,
[ 933.536545] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 933.536545] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 933.536545] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 933.536545] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] self._fetch_image_if_missing(context, vi)
[ 933.536545] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 933.536545] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] image_cache(vi, tmp_image_ds_loc)
[ 933.536545] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 933.536854] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] vm_util.copy_virtual_disk(
[ 933.536854] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 933.536854] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] session._wait_for_task(vmdk_copy_task)
[ 933.536854] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 933.536854] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] return self.wait_for_task(task_ref)
[ 933.536854] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 933.536854] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] return evt.wait()
[ 933.536854] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 933.536854] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] result = hub.switch()
[ 933.536854] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 933.536854] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] return self.greenlet.switch()
[ 933.536854] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 933.536854] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] self.f(*self.args, **self.kw)
[ 933.537161] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 933.537161] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] raise exceptions.translate_fault(task_info.error)
[ 933.537161] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 933.537161] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Faults: ['InvalidArgument']
[ 933.537161] env[62507]: ERROR nova.compute.manager [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3]
[ 933.537356] env[62507]: DEBUG nova.compute.utils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 933.538822] env[62507]: DEBUG nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Build of instance 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3 was re-scheduled: A specified parameter was not correct: fileType
[ 933.538822] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 933.539218] env[62507]: DEBUG nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 933.539388] env[62507]: DEBUG nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
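Both spawn-failure tracebacks above terminate in oslo.vmware's _poll_task raising the translated fault, which surfaces as VimFaultException with the vCenter fault names in its fault_list. A minimal sketch of inspecting that exception at a call site, assuming an existing session and task reference (neither is constructed here):

    from oslo_vmware import exceptions as vexc

    def wait_checked(session, task):
        try:
            return session.wait_for_task(task)
        except vexc.VimFaultException as e:
            # e.fault_list carries names such as 'InvalidArgument', the
            # fault that triggers the re-schedule logged above.
            raise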
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 933.539543] env[62507]: DEBUG nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 933.539706] env[62507]: DEBUG nova.network.neutron [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 934.005847] env[62507]: DEBUG nova.network.neutron [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.019952] env[62507]: INFO nova.compute.manager [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Took 0.48 seconds to deallocate network for instance. [ 934.125755] env[62507]: INFO nova.scheduler.client.report [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Deleted allocations for instance 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3 [ 934.148952] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98460eea-0e03-487d-bb82-db4cbf15f2fa tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 377.529s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.150159] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a7e3fcef-de84-4781-8059-66933c155d4b tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 178.776s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.150385] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a7e3fcef-de84-4781-8059-66933c155d4b tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.150590] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a7e3fcef-de84-4781-8059-66933c155d4b tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.151905] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a7e3fcef-de84-4781-8059-66933c155d4b tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.153058] env[62507]: INFO nova.compute.manager [None req-a7e3fcef-de84-4781-8059-66933c155d4b tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Terminating instance [ 934.155388] env[62507]: DEBUG nova.compute.manager [None req-a7e3fcef-de84-4781-8059-66933c155d4b tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 934.155586] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a7e3fcef-de84-4781-8059-66933c155d4b tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 934.156360] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-98bbd468-31a7-4d65-94cf-571ddebf9952 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.161811] env[62507]: DEBUG nova.compute.manager [None req-a38be65d-2feb-4cfc-ad9f-48e9cd6d8b1a tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 26bd2ef7-5d8b-4924-9bc6-2dfc7c59e667] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 934.168649] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f887b4ea-9219-459c-a370-2ca2b160a0b5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.199632] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-a7e3fcef-de84-4781-8059-66933c155d4b tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3 could not be found. [ 934.199913] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a7e3fcef-de84-4781-8059-66933c155d4b tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 934.200350] env[62507]: INFO nova.compute.manager [None req-a7e3fcef-de84-4781-8059-66933c155d4b tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Took 0.04 seconds to destroy the instance on the hypervisor. 
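The failure that opened this sequence ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) is a VIM fault surfaced by oslo.vmware's task poller: _poll_task reads task_info.error and raises the translated exception, which Nova's build path catches and turns into a reschedule. That also explains the 377.529s lock hold above: the instance lock spans the whole _locked_do_build_and_run_instance attempt, including the failed spawn and cleanup. A minimal sketch of the caller side, assuming `session` is an established oslo_vmware.api.VMwareAPISession and `task_ref` came from an earlier invoke_api() call (the function name here is illustrative, not Nova's):

from oslo_vmware import exceptions as vexc

def wait_and_classify(session, task_ref):
    # Block on a vSphere task; separate parameter bugs from transient faults.
    try:
        # wait_for_task() polls TaskInfo until completion and raises the
        # translated fault on error -- the _poll_task path in the traceback.
        return session.wait_for_task(task_ref)
    except vexc.VimFaultException as exc:
        # exc.fault_list mirrors the "Faults: ['InvalidArgument']" log lines.
        if 'InvalidArgument' in (exc.fault_list or []):
            # A bad request parameter (here: fileType); retrying the same
            # call cannot succeed, so surface it and let the build reschedule.
            raise
        raise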
[ 934.200695] env[62507]: DEBUG oslo.service.loopingcall [None req-a7e3fcef-de84-4781-8059-66933c155d4b tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 934.201111] env[62507]: DEBUG nova.compute.manager [None req-a38be65d-2feb-4cfc-ad9f-48e9cd6d8b1a tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] [instance: 26bd2ef7-5d8b-4924-9bc6-2dfc7c59e667] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 934.202825] env[62507]: DEBUG nova.compute.manager [-] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 934.202908] env[62507]: DEBUG nova.network.neutron [-] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 934.231186] env[62507]: DEBUG nova.network.neutron [-] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.233633] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a38be65d-2feb-4cfc-ad9f-48e9cd6d8b1a tempest-DeleteServersAdminTestJSON-695666790 tempest-DeleteServersAdminTestJSON-695666790-project-member] Lock "26bd2ef7-5d8b-4924-9bc6-2dfc7c59e667" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.866s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.242193] env[62507]: INFO nova.compute.manager [-] [instance: 58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3] Took 0.04 seconds to deallocate network for instance. [ 934.245180] env[62507]: DEBUG nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 934.299662] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.300275] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.301701] env[62507]: INFO nova.compute.claims [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 934.343434] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a7e3fcef-de84-4781-8059-66933c155d4b tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "58d7e626-35a8-4c3d-bb34-11d1f6a0d2e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.193s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.620207] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b0f1ee-fa9e-449c-a77c-0fcc67acde96 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.627947] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932fbcc5-de3e-4d96-9075-e13d6d66a657 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.657092] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c497f8-83bc-4a41-85b8-c584e77a4c34 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.664378] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1a3b5f-9c23-4157-b0b4-c49997abeb23 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.677319] env[62507]: DEBUG nova.compute.provider_tree [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.687044] env[62507]: DEBUG nova.scheduler.client.report [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 934.701322] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.401s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.701813] env[62507]: DEBUG nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 934.738173] env[62507]: DEBUG nova.compute.utils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 934.742794] env[62507]: DEBUG nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 934.742794] env[62507]: DEBUG nova.network.neutron [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 934.753320] env[62507]: DEBUG nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Start building block device mappings for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 934.821047] env[62507]: DEBUG nova.policy [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '156af0071802455d9cf233b60f1761f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '884c06ca36464159847e4a452154a873', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 934.831706] env[62507]: DEBUG nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 934.853959] env[62507]: DEBUG nova.virt.hardware [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 934.854289] env[62507]: DEBUG nova.virt.hardware [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 934.854358] env[62507]: DEBUG nova.virt.hardware [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.854659] env[62507]: DEBUG nova.virt.hardware [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 934.854659] env[62507]: DEBUG nova.virt.hardware [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 934.854797] env[62507]: DEBUG nova.virt.hardware 
[None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 934.854999] env[62507]: DEBUG nova.virt.hardware [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 934.855173] env[62507]: DEBUG nova.virt.hardware [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 934.855342] env[62507]: DEBUG nova.virt.hardware [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 934.855556] env[62507]: DEBUG nova.virt.hardware [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 934.855752] env[62507]: DEBUG nova.virt.hardware [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 934.856691] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b395ce1-0cac-4494-a011-18788e1e7ef4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.865182] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0341acb8-6c61-445c-9aa8-293b885b8b38 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.108946] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cefe5265-47e3-4f46-8a41-a3997bd6a45d tempest-ServerDiagnosticsTest-1845157807 tempest-ServerDiagnosticsTest-1845157807-project-member] Acquiring lock "a68f6eb0-a549-4c52-b349-bcbc8e2b8669" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.108946] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cefe5265-47e3-4f46-8a41-a3997bd6a45d tempest-ServerDiagnosticsTest-1845157807 tempest-ServerDiagnosticsTest-1845157807-project-member] Lock "a68f6eb0-a549-4c52-b349-bcbc8e2b8669" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.287817] env[62507]: DEBUG nova.network.neutron [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Successfully created port: adcbba83-2638-481f-a760-4d8d0979f10a {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 936.145231] env[62507]: DEBUG nova.network.neutron [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Successfully updated port: adcbba83-2638-481f-a760-4d8d0979f10a {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 936.155117] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "refresh_cache-01d865c8-ed85-45ec-aac6-bf923cd52dfa" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.155117] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquired lock "refresh_cache-01d865c8-ed85-45ec-aac6-bf923cd52dfa" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.155117] env[62507]: DEBUG nova.network.neutron [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 936.212051] env[62507]: DEBUG nova.network.neutron [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 936.267487] env[62507]: DEBUG nova.compute.manager [req-70f07f59-3aca-4ffd-a783-fb1ced45688f req-0b1e0f3a-903f-418f-9420-65f4640eb531 service nova] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Received event network-vif-plugged-adcbba83-2638-481f-a760-4d8d0979f10a {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 936.267748] env[62507]: DEBUG oslo_concurrency.lockutils [req-70f07f59-3aca-4ffd-a783-fb1ced45688f req-0b1e0f3a-903f-418f-9420-65f4640eb531 service nova] Acquiring lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.267896] env[62507]: DEBUG oslo_concurrency.lockutils [req-70f07f59-3aca-4ffd-a783-fb1ced45688f req-0b1e0f3a-903f-418f-9420-65f4640eb531 service nova] Lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.268074] env[62507]: DEBUG oslo_concurrency.lockutils [req-70f07f59-3aca-4ffd-a783-fb1ced45688f req-0b1e0f3a-903f-418f-9420-65f4640eb531 service nova] Lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.268240] env[62507]: DEBUG nova.compute.manager [req-70f07f59-3aca-4ffd-a783-fb1ced45688f req-0b1e0f3a-903f-418f-9420-65f4640eb531 service nova] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] No waiting events found dispatching network-vif-plugged-adcbba83-2638-481f-a760-4d8d0979f10a {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 936.268465] env[62507]: WARNING nova.compute.manager [req-70f07f59-3aca-4ffd-a783-fb1ced45688f req-0b1e0f3a-903f-418f-9420-65f4640eb531 service nova] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Received unexpected event network-vif-plugged-adcbba83-2638-481f-a760-4d8d0979f10a for instance with vm_state building and task_state spawning. 
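The network-vif-plugged exchange above is Nova's external-event handshake with Neutron: the compute manager registers a waiter for the event before plugging the VIF, and Neutron posts the event once the port goes active. The WARNING fires because the notification arrived while the instance was still building, before any waiter was registered, so pop_instance_event found nothing to dispatch; the build proceeds anyway, since the port is already active by the time the cache update below lands. An illustrative sketch of that pattern using only the standard library (the class and method names are hypothetical, not Nova's internals):

import threading

class InstanceEvents:
    # Registry of waiters keyed by event name,
    # e.g. 'network-vif-plugged-<port_id>'.
    def __init__(self):
        self._waiters = {}
        self._lock = threading.Lock()

    def prepare(self, name):
        # Register interest *before* triggering the action that emits the event.
        with self._lock:
            return self._waiters.setdefault(name, threading.Event())

    def pop(self, name):
        # Returns None when nothing registered -- the case logged above as
        # "Received unexpected event ... for instance with vm_state building".
        with self._lock:
            return self._waiters.pop(name, None)

events = InstanceEvents()
waiter = events.prepare('network-vif-plugged-adcbba83-2638-481f-a760-4d8d0979f10a')
# ... plug the VIF, then block until the event handler sets the flag:
# waiter.wait(timeout=300)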
[ 936.484162] env[62507]: DEBUG nova.network.neutron [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Updating instance_info_cache with network_info: [{"id": "adcbba83-2638-481f-a760-4d8d0979f10a", "address": "fa:16:3e:e4:e4:9b", "network": {"id": "e1d5d969-2a6a-4fe4-a020-7cfcf85e69ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-524905620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "884c06ca36464159847e4a452154a873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadcbba83-26", "ovs_interfaceid": "adcbba83-2638-481f-a760-4d8d0979f10a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.501255] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Releasing lock "refresh_cache-01d865c8-ed85-45ec-aac6-bf923cd52dfa" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.501568] env[62507]: DEBUG nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Instance network_info: |[{"id": "adcbba83-2638-481f-a760-4d8d0979f10a", "address": "fa:16:3e:e4:e4:9b", "network": {"id": "e1d5d969-2a6a-4fe4-a020-7cfcf85e69ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-524905620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "884c06ca36464159847e4a452154a873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadcbba83-26", "ovs_interfaceid": "adcbba83-2638-481f-a760-4d8d0979f10a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 936.502009] env[62507]: DEBUG nova.virt.vmwareapi.vmops 
[None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:e4:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '209639b9-c313-4b35-86dc-dccd744d174a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'adcbba83-2638-481f-a760-4d8d0979f10a', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 936.509648] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Creating folder: Project (884c06ca36464159847e4a452154a873). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 936.511180] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6aac7cf0-4a33-4fc8-96c2-b21f93050df0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.515748] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "1fac8aa4-37a9-4f94-8050-b338cd2cd182" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.515977] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "1fac8aa4-37a9-4f94-8050-b338cd2cd182" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.526066] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Created folder: Project (884c06ca36464159847e4a452154a873) in parent group-v497991. [ 936.526256] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Creating folder: Instances. Parent ref: group-v498049. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 936.526478] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0af3daf-8a22-42db-a17a-e5fb94258c46 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.536443] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Created folder: Instances in parent group-v498049. 
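The two Folder.CreateFolder calls above show how the VMware driver namespaces instances in vCenter: a per-project folder named after the tenant ID under the OpenStack parent, then an Instances folder beneath it. Concurrent builds in the same project can race on these names, so creation has to tolerate an existing folder. A minimal sketch, assuming an established oslo_vmware.api.VMwareAPISession (`session`) and a parent managed-object reference (`parent_ref`); catching the DuplicateName fault and falling back to a lookup is the usual idiom:

from oslo_vmware import exceptions as vexc

def ensure_folder(session, parent_ref, name):
    # Create a child folder, tolerating a concurrent creator.
    try:
        # Invoking Folder.CreateFolder, as in the opID-tagged calls above.
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_ref, name=name)
    except vexc.DuplicateName:
        # Another request created it first; a real caller would look the
        # existing folder up instead of failing the build.
        return None

# project = ensure_folder(session, parent_ref,
#                         'Project (884c06ca36464159847e4a452154a873)')
# instances = ensure_folder(session, project, 'Instances')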
[ 936.536757] env[62507]: DEBUG oslo.service.loopingcall [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 936.536986] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 936.537226] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86d4a226-bd24-4e96-bb90-7899a5a30340 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.556801] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 936.556801] env[62507]: value = "task-2459987" [ 936.556801] env[62507]: _type = "Task" [ 936.556801] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.564327] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459987, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.068146] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459987, 'name': CreateVM_Task} progress is 99%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.568719] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459987, 'name': CreateVM_Task} progress is 99%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.068687] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459987, 'name': CreateVM_Task, 'duration_secs': 1.357633} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.068879] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 938.069604] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.069727] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.070074] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 938.070326] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1e39a37-5838-4649-897e-219c622d57af {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.074749] env[62507]: DEBUG oslo_vmware.api [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Waiting for the task: (returnval){ [ 938.074749] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52837278-5c40-7631-a8d9-7b74736250d0" [ 938.074749] env[62507]: _type = "Task" [ 938.074749] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.082325] env[62507]: DEBUG oslo_vmware.api [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52837278-5c40-7631-a8d9-7b74736250d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.293515] env[62507]: DEBUG nova.compute.manager [req-2644e42f-bcd8-49ae-b722-142c82326a78 req-732bbbfb-11bf-4277-a8fb-dafb41ffb0e7 service nova] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Received event network-changed-adcbba83-2638-481f-a760-4d8d0979f10a {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 938.293731] env[62507]: DEBUG nova.compute.manager [req-2644e42f-bcd8-49ae-b722-142c82326a78 req-732bbbfb-11bf-4277-a8fb-dafb41ffb0e7 service nova] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Refreshing instance network info cache due to event network-changed-adcbba83-2638-481f-a760-4d8d0979f10a. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 938.293969] env[62507]: DEBUG oslo_concurrency.lockutils [req-2644e42f-bcd8-49ae-b722-142c82326a78 req-732bbbfb-11bf-4277-a8fb-dafb41ffb0e7 service nova] Acquiring lock "refresh_cache-01d865c8-ed85-45ec-aac6-bf923cd52dfa" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.294321] env[62507]: DEBUG oslo_concurrency.lockutils [req-2644e42f-bcd8-49ae-b722-142c82326a78 req-732bbbfb-11bf-4277-a8fb-dafb41ffb0e7 service nova] Acquired lock "refresh_cache-01d865c8-ed85-45ec-aac6-bf923cd52dfa" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.294501] env[62507]: DEBUG nova.network.neutron [req-2644e42f-bcd8-49ae-b722-142c82326a78 req-732bbbfb-11bf-4277-a8fb-dafb41ffb0e7 service nova] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Refreshing network info cache for port adcbba83-2638-481f-a760-4d8d0979f10a {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 938.584427] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.584696] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 938.584902] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.798068] env[62507]: DEBUG nova.network.neutron [req-2644e42f-bcd8-49ae-b722-142c82326a78 req-732bbbfb-11bf-4277-a8fb-dafb41ffb0e7 service nova] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Updated VIF entry in instance network info cache for port adcbba83-2638-481f-a760-4d8d0979f10a. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 938.798373] env[62507]: DEBUG nova.network.neutron [req-2644e42f-bcd8-49ae-b722-142c82326a78 req-732bbbfb-11bf-4277-a8fb-dafb41ffb0e7 service nova] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Updating instance_info_cache with network_info: [{"id": "adcbba83-2638-481f-a760-4d8d0979f10a", "address": "fa:16:3e:e4:e4:9b", "network": {"id": "e1d5d969-2a6a-4fe4-a020-7cfcf85e69ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-524905620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "884c06ca36464159847e4a452154a873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadcbba83-26", "ovs_interfaceid": "adcbba83-2638-481f-a760-4d8d0979f10a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.808493] env[62507]: DEBUG oslo_concurrency.lockutils [req-2644e42f-bcd8-49ae-b722-142c82326a78 req-732bbbfb-11bf-4277-a8fb-dafb41ffb0e7 service nova] Releasing lock "refresh_cache-01d865c8-ed85-45ec-aac6-bf923cd52dfa" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.304509] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9def2483-ef27-449f-a3aa-911eed733f41 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "dcf96348-5199-4c3f-9661-5ac0924c5b96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.305049] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9def2483-ef27-449f-a3aa-911eed733f41 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "dcf96348-5199-4c3f-9661-5ac0924c5b96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.398393] env[62507]: DEBUG oslo_concurrency.lockutils [None req-85e399cb-eacf-418f-a4b1-f6ea361ab803 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Acquiring lock "9da52346-c500-4335-8f4c-39cf56322589" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.398627] env[62507]: DEBUG oslo_concurrency.lockutils [None req-85e399cb-eacf-418f-a4b1-f6ea361ab803 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] 
Lock "9da52346-c500-4335-8f4c-39cf56322589" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.899453] env[62507]: DEBUG oslo_concurrency.lockutils [None req-57ba5bb7-7a1c-4746-a7c7-a42f35068b52 tempest-ImagesNegativeTestJSON-1013885382 tempest-ImagesNegativeTestJSON-1013885382-project-member] Acquiring lock "b0e3f575-9a49-4bb0-af5f-58bdb5ca0aa5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.899665] env[62507]: DEBUG oslo_concurrency.lockutils [None req-57ba5bb7-7a1c-4746-a7c7-a42f35068b52 tempest-ImagesNegativeTestJSON-1013885382 tempest-ImagesNegativeTestJSON-1013885382-project-member] Lock "b0e3f575-9a49-4bb0-af5f-58bdb5ca0aa5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.252879] env[62507]: DEBUG oslo_concurrency.lockutils [None req-11e66b5b-5e56-4a71-8222-f5fd75327387 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "1dcce6af-d9f7-4a24-97c7-4b0425c39d68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.313342] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7617d66a-5f99-416c-a17b-4d4dcdc6b99d tempest-ServerPasswordTestJSON-239737907 tempest-ServerPasswordTestJSON-239737907-project-member] Acquiring lock "9e1c954f-3a25-46f8-a34b-9fa859053951" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.313342] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7617d66a-5f99-416c-a17b-4d4dcdc6b99d tempest-ServerPasswordTestJSON-239737907 tempest-ServerPasswordTestJSON-239737907-project-member] Lock "9e1c954f-3a25-46f8-a34b-9fa859053951" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.112514] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4b7f9dc9-e837-4672-8bba-3e26b3effced tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.840850] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 979.162720] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 979.709127] env[62507]: WARNING oslo_vmware.rw_handles [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 979.709127] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 979.709127] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 979.709127] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 979.709127] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 979.709127] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 979.709127] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 979.709127] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 979.709127] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 979.709127] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 979.709127] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 979.709127] env[62507]: ERROR oslo_vmware.rw_handles [ 979.709573] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/e6ce4ab3-1b48-4e59-a1b3-1d16e25856a9/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 979.711818] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 979.712137] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Copying Virtual Disk [datastore2] vmware_temp/e6ce4ab3-1b48-4e59-a1b3-1d16e25856a9/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/e6ce4ab3-1b48-4e59-a1b3-1d16e25856a9/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 979.712505] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01026821-8f26-46e4-9cf2-f079a6335d05 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.721349] env[62507]: DEBUG oslo_vmware.api [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Waiting for the 
task: (returnval){ [ 979.721349] env[62507]: value = "task-2459988" [ 979.721349] env[62507]: _type = "Task" [ 979.721349] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.729681] env[62507]: DEBUG oslo_vmware.api [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Task: {'id': task-2459988, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.169169] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.231869] env[62507]: DEBUG oslo_vmware.exceptions [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 980.232190] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.232714] env[62507]: ERROR nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 980.232714] env[62507]: Faults: ['InvalidArgument'] [ 980.232714] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Traceback (most recent call last): [ 980.232714] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 980.232714] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] yield resources [ 980.232714] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 980.232714] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] self.driver.spawn(context, instance, image_meta, [ 980.232714] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 980.232714] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 980.232714] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 980.232714] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] 
self._fetch_image_if_missing(context, vi) [ 980.232714] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 980.233024] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] image_cache(vi, tmp_image_ds_loc) [ 980.233024] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 980.233024] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] vm_util.copy_virtual_disk( [ 980.233024] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 980.233024] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] session._wait_for_task(vmdk_copy_task) [ 980.233024] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 980.233024] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] return self.wait_for_task(task_ref) [ 980.233024] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 980.233024] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] return evt.wait() [ 980.233024] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 980.233024] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] result = hub.switch() [ 980.233024] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 980.233024] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] return self.greenlet.switch() [ 980.233351] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 980.233351] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] self.f(*self.args, **self.kw) [ 980.233351] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 980.233351] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] raise exceptions.translate_fault(task_info.error) [ 980.233351] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 980.233351] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Faults: ['InvalidArgument'] [ 980.233351] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] [ 980.233351] env[62507]: INFO nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c 
tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Terminating instance [ 980.234661] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.234878] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 980.235135] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c361377-5f29-47e7-8857-11b3f5c60bf9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.237358] env[62507]: DEBUG nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 980.237553] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 980.238304] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82242312-3004-4e3b-8647-957c83c64b2c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.245484] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 980.245695] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e6deddd0-9406-4d6c-8e6d-5155069adc9c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.247900] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 980.248089] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 980.249041] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-167424f7-6c44-45c7-b03b-f2606304535d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.254137] env[62507]: DEBUG oslo_vmware.api [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Waiting for the task: (returnval){ [ 980.254137] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f82df7-7287-5f2b-2573-66c1d9bf7f9f" [ 980.254137] env[62507]: _type = "Task" [ 980.254137] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.261546] env[62507]: DEBUG oslo_vmware.api [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f82df7-7287-5f2b-2573-66c1d9bf7f9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.330615] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 980.330828] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 980.331132] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Deleting the datastore file [datastore2] 63f63029-d01a-4d55-9753-95b93b7155cf {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 980.331296] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4634a1c8-99cf-4618-9735-ca10dbb8020f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.338666] env[62507]: DEBUG oslo_vmware.api [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Waiting for the task: (returnval){ [ 980.338666] env[62507]: value = "task-2459990" [ 980.338666] env[62507]: _type = "Task" [ 980.338666] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.349067] env[62507]: DEBUG oslo_vmware.api [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Task: {'id': task-2459990, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.764918] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 980.765214] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Creating directory with path [datastore2] vmware_temp/9ad65e3c-feeb-4693-ad56-fb4e55647c7f/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 980.765452] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8cf435d3-8def-4233-a60a-4620cc0b3038 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.848093] env[62507]: DEBUG oslo_vmware.api [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Task: {'id': task-2459990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088366} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.849117] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 980.849307] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 980.849480] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 980.849667] env[62507]: INFO nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Took 0.61 seconds to destroy the instance on the hypervisor. 
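What the traceback at 980.232-980.233 shows: the CopyVirtualDisk_Task behind _cache_sparse_image fails server-side with an InvalidArgument fault on fileType, and oslo.vmware's _poll_task re-raises it client-side via exceptions.translate_fault(), which is what unwinds nova's spawn path. A minimal, self-contained sketch of that polling pattern follows (toy classes, not the actual oslo.vmware implementation):

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    class FakeTask:
        """Toy task whose info ends in the error state, as the
        CopyVirtualDisk_Task above did."""
        def poll(self):
            return 'error', "A specified parameter was not correct: fileType"

    def wait_for_task(task, interval=0.5):
        # Same overall shape as the oslo_vmware.api path in the traceback:
        # a looping call polls the task info until it leaves the
        # queued/running states; a server-side VIM fault is re-raised as a
        # client-side exception.
        while True:
            state, detail = task.poll()
            if state == 'success':
                return detail
            if state == 'error':
                raise VimFaultException(detail)
            time.sleep(interval)

    wait_for_task(FakeTask())  # raises VimFaultException: ... fileType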
[ 980.851274] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Created directory with path [datastore2] vmware_temp/9ad65e3c-feeb-4693-ad56-fb4e55647c7f/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 980.851451] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Fetch image to [datastore2] vmware_temp/9ad65e3c-feeb-4693-ad56-fb4e55647c7f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 980.851622] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/9ad65e3c-feeb-4693-ad56-fb4e55647c7f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 980.852149] env[62507]: DEBUG nova.compute.claims [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 980.852372] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.852589] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.855607] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1bd1d12-ea21-4086-a2e8-8b391aecac13 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.862792] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb722ba7-6370-4a72-8cb5-33b331130f77 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.871767] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f4a130-f9f4-4dcf-b7a2-fe636015213c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.902840] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-34320ffb-e95f-4d81-92e4-034b52d2b1ca {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.913145] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c8263ef1-f106-46b3-8249-2e61fb124794 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.933487] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 980.981776] env[62507]: DEBUG oslo_vmware.rw_handles [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9ad65e3c-feeb-4693-ad56-fb4e55647c7f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 981.045060] env[62507]: DEBUG oslo_vmware.rw_handles [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 981.045265] env[62507]: DEBUG oslo_vmware.rw_handles [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9ad65e3c-feeb-4693-ad56-fb4e55647c7f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 981.167424] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.292371] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e696c1b-6bb5-4507-9f2f-c1c1f0bd0a9a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.299882] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b75180-1e93-4248-bb7f-da9e3bb08f7f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.330733] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcdfdb29-59d3-4a16-b411-162048b083e5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.337873] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8184e5-d639-4dff-ad55-0dd1a396310a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.350673] env[62507]: DEBUG nova.compute.provider_tree [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.359394] env[62507]: DEBUG nova.scheduler.client.report [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 981.374314] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.522s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.374833] env[62507]: ERROR nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 981.374833] env[62507]: Faults: ['InvalidArgument'] [ 981.374833] env[62507]: ERROR nova.compute.manager [instance: 
63f63029-d01a-4d55-9753-95b93b7155cf] Traceback (most recent call last): [ 981.374833] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 981.374833] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] self.driver.spawn(context, instance, image_meta, [ 981.374833] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 981.374833] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 981.374833] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 981.374833] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] self._fetch_image_if_missing(context, vi) [ 981.374833] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 981.374833] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] image_cache(vi, tmp_image_ds_loc) [ 981.374833] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 981.375193] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] vm_util.copy_virtual_disk( [ 981.375193] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 981.375193] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] session._wait_for_task(vmdk_copy_task) [ 981.375193] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 981.375193] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] return self.wait_for_task(task_ref) [ 981.375193] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 981.375193] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] return evt.wait() [ 981.375193] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 981.375193] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] result = hub.switch() [ 981.375193] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 981.375193] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] return self.greenlet.switch() [ 981.375193] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in 
_inner [ 981.375193] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] self.f(*self.args, **self.kw) [ 981.375623] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 981.375623] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] raise exceptions.translate_fault(task_info.error) [ 981.375623] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 981.375623] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Faults: ['InvalidArgument'] [ 981.375623] env[62507]: ERROR nova.compute.manager [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] [ 981.375623] env[62507]: DEBUG nova.compute.utils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 981.377359] env[62507]: DEBUG nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Build of instance 63f63029-d01a-4d55-9753-95b93b7155cf was re-scheduled: A specified parameter was not correct: fileType [ 981.377359] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 981.377723] env[62507]: DEBUG nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 981.377892] env[62507]: DEBUG nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 981.378074] env[62507]: DEBUG nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 981.378274] env[62507]: DEBUG nova.network.neutron [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 981.933322] env[62507]: DEBUG nova.network.neutron [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.945266] env[62507]: INFO nova.compute.manager [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Took 0.57 seconds to deallocate network for instance. [ 982.033758] env[62507]: INFO nova.scheduler.client.report [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Deleted allocations for instance 63f63029-d01a-4d55-9753-95b93b7155cf [ 982.054659] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b8213983-3599-4583-8a2c-30d9c63fb71c tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Lock "63f63029-d01a-4d55-9753-95b93b7155cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 418.898s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.055878] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02481543-bc1e-4655-bd6a-c7a57ed37d98 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Lock "63f63029-d01a-4d55-9753-95b93b7155cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 220.355s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.056358] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02481543-bc1e-4655-bd6a-c7a57ed37d98 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Acquiring lock "63f63029-d01a-4d55-9753-95b93b7155cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.056424] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02481543-bc1e-4655-bd6a-c7a57ed37d98 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Lock "63f63029-d01a-4d55-9753-95b93b7155cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited
0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.056607] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02481543-bc1e-4655-bd6a-c7a57ed37d98 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Lock "63f63029-d01a-4d55-9753-95b93b7155cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.058853] env[62507]: INFO nova.compute.manager [None req-02481543-bc1e-4655-bd6a-c7a57ed37d98 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Terminating instance [ 982.060586] env[62507]: DEBUG nova.compute.manager [None req-02481543-bc1e-4655-bd6a-c7a57ed37d98 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 982.060784] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02481543-bc1e-4655-bd6a-c7a57ed37d98 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 982.061278] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8ec5104a-2072-4c36-8fd6-407732292976 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.066304] env[62507]: DEBUG nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 982.072497] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4f159e-6b92-4b73-9ed8-5bff9f3e1cf8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.103988] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-02481543-bc1e-4655-bd6a-c7a57ed37d98 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 63f63029-d01a-4d55-9753-95b93b7155cf could not be found. [ 982.104109] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02481543-bc1e-4655-bd6a-c7a57ed37d98 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 982.104294] env[62507]: INFO nova.compute.manager [None req-02481543-bc1e-4655-bd6a-c7a57ed37d98 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Took 0.04 seconds to destroy the instance on the hypervisor.
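The lock records at 982.054-982.056 trace the terminate path queuing behind the re-scheduled build on the same per-instance lock (waited 220.355s against a hold of 418.898s), then briefly taking the "<uuid>-events" lock to clear pending events before destroying a VM that is already gone. Schematically, with oslo.concurrency (names illustrative, not Nova's code):

    from oslo_concurrency import lockutils

    INSTANCE = '63f63029-d01a-4d55-9753-95b93b7155cf'

    def do_terminate_instance():
        # The outer per-instance lock serializes terminate against the
        # build that held it; this caller logged "waited 220.355s".
        with lockutils.lock(INSTANCE):
            # Short-lived nested lock guarding the instance's event table.
            with lockutils.lock(INSTANCE + '-events'):
                pass  # clear pending events, then proceed to destroy;
                      # InstanceNotFound is tolerated, which is why the
                      # WARNING above still ends in "Instance destroyed".

    do_terminate_instance()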
[ 982.104536] env[62507]: DEBUG oslo.service.loopingcall [None req-02481543-bc1e-4655-bd6a-c7a57ed37d98 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 982.106783] env[62507]: DEBUG nova.compute.manager [-] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 982.106879] env[62507]: DEBUG nova.network.neutron [-] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 982.120812] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.120812] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.122158] env[62507]: INFO nova.compute.claims [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 982.137099] env[62507]: DEBUG nova.network.neutron [-] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.162781] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.165019] env[62507]: INFO nova.compute.manager [-] [instance: 63f63029-d01a-4d55-9753-95b93b7155cf] Took 0.06 seconds to deallocate network for instance.
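The inventory dict logged at 981.359 (and unchanged again at 982.585 below) is what the claim at 982.122 is checked against; Placement treats usable capacity per resource class as (total - reserved) * allocation_ratio. Worked out for this provider:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0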
[ 982.166845] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.166998] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 982.167130] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 982.186794] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 982.187037] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 982.187199] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 982.187328] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 982.187451] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 982.187570] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 982.187687] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 982.187936] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 982.187936] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 982.188066] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 982.188156] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 982.259692] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02481543-bc1e-4655-bd6a-c7a57ed37d98 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Lock "63f63029-d01a-4d55-9753-95b93b7155cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.204s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.518997] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a429d5e-d8fd-43e0-b27f-43c9e4b1a8ff {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.526611] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad306f94-d76f-4f00-be0e-bcd90ab1c538 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.556097] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a972987-9139-4485-a2ee-2a623cfd92b4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.562693] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffde9bc-6400-41cd-9187-9111ea5dc718 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.576289] env[62507]: DEBUG nova.compute.provider_tree [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.585472] env[62507]: DEBUG nova.scheduler.client.report [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 982.598771] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.478s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.611362] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Acquiring lock "df721b58-f6f9-45d4-afee-855876694990" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.611587] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Lock "df721b58-f6f9-45d4-afee-855876694990" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.616824] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Lock "df721b58-f6f9-45d4-afee-855876694990" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.005s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.617526] env[62507]: DEBUG nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 982.650945] env[62507]: DEBUG nova.compute.utils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 982.652802] env[62507]: DEBUG nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 982.653108] env[62507]: DEBUG nova.network.neutron [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 982.661622] env[62507]: DEBUG nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Start building block device mappings for instance.
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 982.724928] env[62507]: DEBUG nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 982.736890] env[62507]: DEBUG nova.policy [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7bdce1574074ee197a52c2792811764', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0a5f3c950b444a78c8124460fd8c9e9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 982.754047] env[62507]: DEBUG nova.virt.hardware [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 982.754047] env[62507]: DEBUG nova.virt.hardware [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 982.754047] env[62507]: DEBUG nova.virt.hardware [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 982.754287] env[62507]: DEBUG nova.virt.hardware [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 982.754287] env[62507]: DEBUG nova.virt.hardware [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 982.754287] env[62507]: DEBUG nova.virt.hardware [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 
tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 982.754287] env[62507]: DEBUG nova.virt.hardware [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 982.754287] env[62507]: DEBUG nova.virt.hardware [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 982.754425] env[62507]: DEBUG nova.virt.hardware [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 982.754809] env[62507]: DEBUG nova.virt.hardware [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 982.755152] env[62507]: DEBUG nova.virt.hardware [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 982.756131] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d03abde-5e5a-4c9e-8779-9ff32adfedcd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.764459] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313c76fa-2e16-450f-a230-455aad175675 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.168322] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.168782] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 983.179651] env[62507]: DEBUG nova.network.neutron [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Successfully created port: 8e7ce10e-f36a-4202-a6f3-936ed87ee47a {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 984.011162] env[62507]: DEBUG nova.compute.manager [req-ff574d39-1bb1-4b41-b597-807d3c973e1d req-15fce2a3-dd25-4ce2-a94d-a2d6f6910593 service nova] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Received event network-vif-plugged-8e7ce10e-f36a-4202-a6f3-936ed87ee47a {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 984.011162] env[62507]: DEBUG oslo_concurrency.lockutils [req-ff574d39-1bb1-4b41-b597-807d3c973e1d req-15fce2a3-dd25-4ce2-a94d-a2d6f6910593 service nova] Acquiring lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.011162] env[62507]: DEBUG oslo_concurrency.lockutils [req-ff574d39-1bb1-4b41-b597-807d3c973e1d req-15fce2a3-dd25-4ce2-a94d-a2d6f6910593 service nova] Lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.011162] env[62507]: DEBUG oslo_concurrency.lockutils [req-ff574d39-1bb1-4b41-b597-807d3c973e1d req-15fce2a3-dd25-4ce2-a94d-a2d6f6910593 service nova] Lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.012164] env[62507]: DEBUG nova.compute.manager [req-ff574d39-1bb1-4b41-b597-807d3c973e1d req-15fce2a3-dd25-4ce2-a94d-a2d6f6910593 service nova] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] No waiting events found dispatching network-vif-plugged-8e7ce10e-f36a-4202-a6f3-936ed87ee47a {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 984.012164] env[62507]: WARNING nova.compute.manager [req-ff574d39-1bb1-4b41-b597-807d3c973e1d req-15fce2a3-dd25-4ce2-a94d-a2d6f6910593 service nova] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Received unexpected event network-vif-plugged-8e7ce10e-f36a-4202-a6f3-936ed87ee47a for instance with vm_state building and task_state spawning.
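At 984.011 the network-vif-plugged event for port 8e7ce10e arrives before anything in this spawn has registered a waiter, so the pop under the "<uuid>-events" lock finds nothing and the manager logs the "unexpected event" WARNING; the event is simply dropped, which is harmless here since the spawn is not blocked on it. A toy model of that register-then-pop contract (simplified, not Nova's InstanceEvents):

    import threading

    class InstanceEvents:
        """Toy version of the waiter table guarded by the "<uuid>-events"
        lock in the records above."""
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}            # event name -> threading.Event

        def prepare(self, name):
            # Must run BEFORE the external event fires for the wait to work.
            with self._lock:
                self._waiters[name] = threading.Event()
                return self._waiters[name]

        def pop_event(self, name):
            # Runs when neutron's event arrives via external_instance_event.
            with self._lock:
                return self._waiters.pop(name, None)

    events = InstanceEvents()
    # No prepare() was called, so the pop comes back empty:
    if events.pop_event('network-vif-plugged-8e7ce10e') is None:
        print('No waiting events found dispatching; received unexpected event')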
[ 984.013762] env[62507]: DEBUG nova.network.neutron [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Successfully updated port: 8e7ce10e-f36a-4202-a6f3-936ed87ee47a {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 984.025325] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Acquiring lock "refresh_cache-b866307e-f0e9-40d0-8603-fbfb9e2ee15a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.025511] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Acquired lock "refresh_cache-b866307e-f0e9-40d0-8603-fbfb9e2ee15a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.025683] env[62507]: DEBUG nova.network.neutron [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 984.093399] env[62507]: DEBUG nova.network.neutron [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 984.275562] env[62507]: DEBUG nova.network.neutron [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Updating instance_info_cache with network_info: [{"id": "8e7ce10e-f36a-4202-a6f3-936ed87ee47a", "address": "fa:16:3e:3e:ec:13", "network": {"id": "4b82b55f-0863-4d0d-8b24-78dad7a02e53", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-30069240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0a5f3c950b444a78c8124460fd8c9e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e7ce10e-f3", "ovs_interfaceid": "8e7ce10e-f36a-4202-a6f3-936ed87ee47a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.290353] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 
tempest-ServerGroupTestJSON-185694910-project-member] Releasing lock "refresh_cache-b866307e-f0e9-40d0-8603-fbfb9e2ee15a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.290646] env[62507]: DEBUG nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Instance network_info: |[{"id": "8e7ce10e-f36a-4202-a6f3-936ed87ee47a", "address": "fa:16:3e:3e:ec:13", "network": {"id": "4b82b55f-0863-4d0d-8b24-78dad7a02e53", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-30069240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0a5f3c950b444a78c8124460fd8c9e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e7ce10e-f3", "ovs_interfaceid": "8e7ce10e-f36a-4202-a6f3-936ed87ee47a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 984.291061] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:ec:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92f3cfd6-c130-4390-8910-865fbc42afd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e7ce10e-f36a-4202-a6f3-936ed87ee47a', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 984.300392] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Creating folder: Project (a0a5f3c950b444a78c8124460fd8c9e9). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 984.300940] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ba7f62a-fc57-43a6-b9ab-61f93ee83bff {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.312031] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Created folder: Project (a0a5f3c950b444a78c8124460fd8c9e9) in parent group-v497991. [ 984.312225] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Creating folder: Instances. 
Parent ref: group-v498052. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 984.312440] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22d18ce6-4805-4a7d-a702-507255a25ef1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.320503] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Created folder: Instances in parent group-v498052. [ 984.320727] env[62507]: DEBUG oslo.service.loopingcall [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.320901] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 984.321097] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-488ef06a-abd1-4262-aa85-1896ae248d48 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.339457] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.339457] env[62507]: value = "task-2459993" [ 984.339457] env[62507]: _type = "Task" [ 984.339457] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.348356] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459993, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.850469] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459993, 'name': CreateVM_Task, 'duration_secs': 0.290506} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
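The CreateVM_Task records above show the oslo.vmware request/poll/complete cycle: the client invokes Folder.CreateVM_Task, gets back a task handle ("task-2459993"), and polls it until it reports success ('duration_secs': 0.290506). A minimal sketch of that polling pattern, with a hypothetical get_task_info callable standing in for the real session API; this is an illustration of the pattern, not the oslo.vmware implementation:

    import time

    class TaskFailed(Exception):
        """Raised when the remote task reports an error state."""

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a vSphere-style task until it finishes.

        get_task_info is a hypothetical callable returning an object with
        .state ('running', 'success' or 'error'), .progress and .error.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                raise TaskFailed(info.error)
            # The records above show the same idea: progress is logged
            # on each poll until the task completes.
            print(f"task progress is {info.progress}%")
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete in time")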
[ 984.850469] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 984.850627] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.850779] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.851113] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 984.851348] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8efc1b6-72b5-4c5b-9dc1-f73cbd4455b6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.855630] env[62507]: DEBUG oslo_vmware.api [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Waiting for the task: (returnval){ [ 984.855630] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52535367-3633-b81f-9a72-b24fd387be31" [ 984.855630] env[62507]: _type = "Task" [ 984.855630] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.863305] env[62507]: DEBUG oslo_vmware.api [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52535367-3633-b81f-9a72-b24fd387be31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
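The image-cache records above ("Acquiring lock", "Acquired lock", "Acquired external semaphore" on "[datastore2] devstack-image-cache_base/601dc712-...") show oslo.concurrency's named-lock pattern: a per-image lock name serializes concurrent work on the same cached image. A minimal sketch of that pattern, assuming oslo.concurrency is installed; the lock_path value here is an assumption for the sketch:

    from oslo_concurrency import lockutils

    def with_image_cache_lock(image_id, fn):
        # Mirror the lock naming visible in the records above.
        lock_name = "[datastore2] devstack-image-cache_base/" + image_id
        # external=True backs the lock with a file so separate processes
        # serialize too, which is what the "external semaphore" record
        # refers to.
        with lockutils.lock(lock_name, external=True,
                            lock_path="/tmp/nova-locks"):
            return fn()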
[ 985.167766] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.168116] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.168215] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.183898] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.184145] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.184530] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.184769] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 985.186423] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d6be04-792f-417f-90fa-c4362e2906e0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.196211] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8f9b79-776c-40e4-b6a7-1c140ec7f1e3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.212237] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb76421-02cb-43e2-8883-d73a4338faa8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.219732] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b282665-b181-4a38-bf4e-740480b417ed {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.250141] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181067MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 985.250443] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.250779] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.344366] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance dc241495-c9b7-4f2f-895d-e25008cc738a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.344658] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1dcce6af-d9f7-4a24-97c7-4b0425c39d68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.344888] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.345101] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 498b6bd7-03d8-44e7-b007-27d86afcb028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.345289] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.345463] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4a3639c7-8795-4702-a729-8239b0d55d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.345637] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e682e67f-5a36-4851-b870-7099d7db119d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.345811] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a9b1ef96-1409-4700-a1bb-4aec1691a0fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.346386] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01d865c8-ed85-45ec-aac6-bf923cd52dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.346630] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.364417] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 35a9caf1-5cb9-4d34-81ed-e064cfc73456 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.375139] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.375689] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 985.376278] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.378281] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 140c1da2-016b-45da-8134-90e1d51b81e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.392758] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f75f4ffa-8494-4edf-803f-9fe61b4899b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.405481] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c07010ad-0831-4b46-80ca-4532eb3dac7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.418763] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 92a87da8-3ed1-4d74-9a6e-abb35d69d9ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.431193] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 8e909a0f-02f7-405b-8a4a-bcf555db245d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.444148] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3ff410eb-b7f7-4735-b20f-b6f4a59bfc8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.456583] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 31ed600c-d84c-4595-aceb-38f5d4e5aaff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.475072] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 36c7ffe9-6a5f-4758-b1e8-36c0330d9a23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.484582] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2fea54d3-0637-4811-9ff3-1a72bc4e08ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.501014] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.513251] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b53bed7e-5e76-4aa5-abe2-b05750497404 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.529604] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a68f6eb0-a549-4c52-b349-bcbc8e2b8669 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.541591] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1fac8aa4-37a9-4f94-8050-b338cd2cd182 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.556257] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance dcf96348-5199-4c3f-9661-5ac0924c5b96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.568972] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9da52346-c500-4335-8f4c-39cf56322589 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.580412] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b0e3f575-9a49-4bb0-af5f-58bdb5ca0aa5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.594025] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9e1c954f-3a25-46f8-a34b-9fa859053951 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
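The audit records above split instances into two groups: those actively managed on this host, whose placement allocations are kept, and those scheduled here but not yet started, whose heal is skipped because the scheduler's allocation will be reconciled once the build completes. A simplified, hypothetical decision helper capturing that distinction (not Nova's implementation):

    def should_skip_allocation_heal(instance_host, this_host, instance_started):
        # Actively managed here: keep and, if needed, heal the allocation.
        if instance_host == this_host and instance_started:
            return False
        # Scheduled to this host but not started yet: skip healing for now.
        return True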
[ 985.594025] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 985.594025] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 986.066795] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696764f7-3e90-43d3-af47-1bc891ba3054 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.075751] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae747e1-5a42-44b2-a64d-bd6b4052c00a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.109473] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0e75df-6442-44dc-91cb-33ee2132b11f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.117652] env[62507]: DEBUG nova.compute.manager [req-cc785b86-234f-4547-9d0f-ca5175c21419 req-99b35a59-c150-4a71-96e7-cd5e4b54ee92 service nova] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Received event network-changed-8e7ce10e-f36a-4202-a6f3-936ed87ee47a {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 986.117652] env[62507]: DEBUG nova.compute.manager [req-cc785b86-234f-4547-9d0f-ca5175c21419 req-99b35a59-c150-4a71-96e7-cd5e4b54ee92 service nova] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Refreshing instance network info cache due to event network-changed-8e7ce10e-f36a-4202-a6f3-936ed87ee47a. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
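The resource-update pass concludes in the records just below: the provider tree and the scheduler report client compare the locally computed inventory against what placement already holds and skip the update when nothing changed. A minimal sketch of that kind of comparison, with the dict shape taken from the record below; this is an illustration, not the report client's code:

    def changed_resource_classes(current, reported):
        # Resource class name -> {'total', 'reserved', 'min_unit',
        # 'max_unit', 'step_size', 'allocation_ratio'}, as in the
        # inventory record below.
        names = set(current) | set(reported)
        return {rc for rc in names if current.get(rc) != reported.get(rc)}

    vcpu = {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
            "step_size": 1, "allocation_ratio": 4.0}
    print(changed_resource_classes({"VCPU": vcpu}, {"VCPU": dict(vcpu)}))
    # -> set(): nothing to send, matching "Inventory has not changed"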
[ 986.117652] env[62507]: DEBUG oslo_concurrency.lockutils [req-cc785b86-234f-4547-9d0f-ca5175c21419 req-99b35a59-c150-4a71-96e7-cd5e4b54ee92 service nova] Acquiring lock "refresh_cache-b866307e-f0e9-40d0-8603-fbfb9e2ee15a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.117652] env[62507]: DEBUG oslo_concurrency.lockutils [req-cc785b86-234f-4547-9d0f-ca5175c21419 req-99b35a59-c150-4a71-96e7-cd5e4b54ee92 service nova] Acquired lock "refresh_cache-b866307e-f0e9-40d0-8603-fbfb9e2ee15a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.117652] env[62507]: DEBUG nova.network.neutron [req-cc785b86-234f-4547-9d0f-ca5175c21419 req-99b35a59-c150-4a71-96e7-cd5e4b54ee92 service nova] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Refreshing network info cache for port 8e7ce10e-f36a-4202-a6f3-936ed87ee47a {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 986.121854] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61271a8b-1f63-4467-abde-cd3ed275dc72 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.143319] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.154438] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 986.187155] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 986.187503] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.937s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.698656] env[62507]: DEBUG nova.network.neutron [req-cc785b86-234f-4547-9d0f-ca5175c21419 req-99b35a59-c150-4a71-96e7-cd5e4b54ee92 service nova] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Updated VIF entry in instance network info cache for port 8e7ce10e-f36a-4202-a6f3-936ed87ee47a.
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 986.699220] env[62507]: DEBUG nova.network.neutron [req-cc785b86-234f-4547-9d0f-ca5175c21419 req-99b35a59-c150-4a71-96e7-cd5e4b54ee92 service nova] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Updating instance_info_cache with network_info: [{"id": "8e7ce10e-f36a-4202-a6f3-936ed87ee47a", "address": "fa:16:3e:3e:ec:13", "network": {"id": "4b82b55f-0863-4d0d-8b24-78dad7a02e53", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-30069240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0a5f3c950b444a78c8124460fd8c9e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e7ce10e-f3", "ovs_interfaceid": "8e7ce10e-f36a-4202-a6f3-936ed87ee47a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.716510] env[62507]: DEBUG oslo_concurrency.lockutils [req-cc785b86-234f-4547-9d0f-ca5175c21419 req-99b35a59-c150-4a71-96e7-cd5e4b54ee92 service nova] Releasing lock "refresh_cache-b866307e-f0e9-40d0-8603-fbfb9e2ee15a" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.950500] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee0a0459-3973-4f2b-8d10-b51379d3996f tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Acquiring lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.679183] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "3627bbf7-507f-4345-b093-3b4f5bb45eae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.679463] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "3627bbf7-507f-4345-b093-3b4f5bb45eae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.493310] env[62507]: DEBUG oslo_concurrency.lockutils [None req-414f98db-771f-487b-99dc-05a2fed6786e tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "b86fd157-ec5a-4e61-967a-c7cdd86bfea1" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.493528] env[62507]: DEBUG oslo_concurrency.lockutils [None req-414f98db-771f-487b-99dc-05a2fed6786e tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "b86fd157-ec5a-4e61-967a-c7cdd86bfea1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.660366] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4b797e5c-45e9-4ae5-a441-60c74e701749 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] Acquiring lock "2e7dfd95-dc72-4dd8-9602-dd1af3d330a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.660638] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4b797e5c-45e9-4ae5-a441-60c74e701749 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] Lock "2e7dfd95-dc72-4dd8-9602-dd1af3d330a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.829652] env[62507]: DEBUG oslo_concurrency.lockutils [None req-86683ef0-6915-4f80-8e2e-6f87664e8e9f tempest-ServersListShow296Test-1840877548 tempest-ServersListShow296Test-1840877548-project-member] Acquiring lock "ef130396-4736-4601-9024-6f562d5af828" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.829876] env[62507]: DEBUG oslo_concurrency.lockutils [None req-86683ef0-6915-4f80-8e2e-6f87664e8e9f tempest-ServersListShow296Test-1840877548 tempest-ServersListShow296Test-1840877548-project-member] Lock "ef130396-4736-4601-9024-6f562d5af828" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.609144] env[62507]: DEBUG oslo_concurrency.lockutils [None req-edec3c59-0b0b-4727-998e-764e4c7fc22b tempest-ServersNegativeTestJSON-1851693462 tempest-ServersNegativeTestJSON-1851693462-project-member] Acquiring lock "7b49dd64-781d-48c8-ac86-0c523b39f99a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.609144] env[62507]: DEBUG oslo_concurrency.lockutils [None req-edec3c59-0b0b-4727-998e-764e4c7fc22b tempest-ServersNegativeTestJSON-1851693462 tempest-ServersNegativeTestJSON-1851693462-project-member] Lock "7b49dd64-781d-48c8-ac86-0c523b39f99a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.820816] env[62507]: DEBUG 
oslo_concurrency.lockutils [None req-e71dd7a0-8f9c-4644-8ab8-3e90d84cda88 tempest-ServerAddressesTestJSON-899176983 tempest-ServerAddressesTestJSON-899176983-project-member] Acquiring lock "f70eaaec-66d0-4ec0-b947-3eaa9d6038ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.821420] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e71dd7a0-8f9c-4644-8ab8-3e90d84cda88 tempest-ServerAddressesTestJSON-899176983 tempest-ServerAddressesTestJSON-899176983-project-member] Lock "f70eaaec-66d0-4ec0-b947-3eaa9d6038ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.728482] env[62507]: WARNING oslo_vmware.rw_handles [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1029.728482] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1029.728482] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1029.728482] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1029.728482] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1029.728482] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1029.728482] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1029.728482] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1029.728482] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1029.728482] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1029.728482] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1029.728482] env[62507]: ERROR oslo_vmware.rw_handles [ 1029.729272] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/9ad65e3c-feeb-4693-ad56-fb4e55647c7f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1029.730958] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1029.731239] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Copying 
Virtual Disk [datastore2] vmware_temp/9ad65e3c-feeb-4693-ad56-fb4e55647c7f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/9ad65e3c-feeb-4693-ad56-fb4e55647c7f/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1029.731549] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5883d34-353b-44d6-8e80-26e22d7c5745 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.739855] env[62507]: DEBUG oslo_vmware.api [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Waiting for the task: (returnval){ [ 1029.739855] env[62507]: value = "task-2459994" [ 1029.739855] env[62507]: _type = "Task" [ 1029.739855] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.748465] env[62507]: DEBUG oslo_vmware.api [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Task: {'id': task-2459994, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.250108] env[62507]: DEBUG oslo_vmware.exceptions [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1030.250406] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.250966] env[62507]: ERROR nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1030.250966] env[62507]: Faults: ['InvalidArgument'] [ 1030.250966] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Traceback (most recent call last): [ 1030.250966] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1030.250966] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] yield resources [ 1030.250966] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1030.250966] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] self.driver.spawn(context, instance, image_meta, [ 1030.250966] env[62507]: ERROR nova.compute.manager [instance: 
dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1030.250966] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1030.250966] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1030.250966] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] self._fetch_image_if_missing(context, vi) [ 1030.250966] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1030.251581] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] image_cache(vi, tmp_image_ds_loc) [ 1030.251581] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1030.251581] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] vm_util.copy_virtual_disk( [ 1030.251581] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1030.251581] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] session._wait_for_task(vmdk_copy_task) [ 1030.251581] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1030.251581] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] return self.wait_for_task(task_ref) [ 1030.251581] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1030.251581] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] return evt.wait() [ 1030.251581] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1030.251581] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] result = hub.switch() [ 1030.251581] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1030.251581] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] return self.greenlet.switch() [ 1030.252139] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1030.252139] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] self.f(*self.args, **self.kw) [ 1030.252139] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1030.252139] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] raise 
exceptions.translate_fault(task_info.error) [ 1030.252139] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1030.252139] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Faults: ['InvalidArgument'] [ 1030.252139] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] [ 1030.252139] env[62507]: INFO nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Terminating instance [ 1030.252921] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.253155] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1030.253407] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e6a50dfb-fc16-4af2-a9e7-679d2f1687b9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.255533] env[62507]: DEBUG nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
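The failure above reads as a chain: the WARNING at [ 1029.728482] shows the HTTP download of image 601dc712-... being cut off (RemoteDisconnected), and the CopyVirtualDisk_Task that then copies the cached tmp-sparse.vmdk fails with InvalidArgument on fileType, which is consistent with a truncated or malformed source file; Nova aborts the build and destroys the instance. A hedged sketch of a guard that would surface the truncation earlier, assuming the expected byte count is known from the image service (hypothetical helper, not Nova's code):

    import os

    def verify_cached_image(path, expected_size):
        # A truncated download can otherwise resurface later as an
        # unrelated-looking vSphere fault, as in the traceback above.
        actual = os.path.getsize(path)
        if actual != expected_size:
            raise IOError(
                f"truncated image at {path}: got {actual} bytes, "
                f"want {expected_size}")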
[ 1030.255722] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1030.256484] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce2baed-c5dd-49a7-961c-e484cd564ebe {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.263230] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1030.263473] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f6cf440-817c-430a-ae0e-08d069a677e7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.265671] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1030.265823] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1030.266808] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d61c84fc-f010-4d57-8aa2-4f6caa6140bf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.271325] env[62507]: DEBUG oslo_vmware.api [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Waiting for the task: (returnval){ [ 1030.271325] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]524f5c6e-5043-336a-6ba9-36bb7e088562" [ 1030.271325] env[62507]: _type = "Task" [ 1030.271325] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.278266] env[62507]: DEBUG oslo_vmware.api [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]524f5c6e-5043-336a-6ba9-36bb7e088562, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.329470] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1030.329702] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1030.329883] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Deleting the datastore file [datastore2] dc241495-c9b7-4f2f-895d-e25008cc738a {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1030.330177] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7e0b862-aa28-4003-a490-32e47d86429c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.336558] env[62507]: DEBUG oslo_vmware.api [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Waiting for the task: (returnval){ [ 1030.336558] env[62507]: value = "task-2459996" [ 1030.336558] env[62507]: _type = "Task" [ 1030.336558] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.345477] env[62507]: DEBUG oslo_vmware.api [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Task: {'id': task-2459996, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.781401] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1030.781703] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Creating directory with path [datastore2] vmware_temp/2c02c06a-ecfd-4515-afc0-1a681e77e6d5/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1030.781903] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f779449-0ba4-44db-8224-55f44869deb1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.797152] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Created directory with path [datastore2] vmware_temp/2c02c06a-ecfd-4515-afc0-1a681e77e6d5/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1030.798028] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Fetch image to [datastore2] vmware_temp/2c02c06a-ecfd-4515-afc0-1a681e77e6d5/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1030.798028] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/2c02c06a-ecfd-4515-afc0-1a681e77e6d5/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1030.798327] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b16f2d-7729-4d0a-84b4-fcc1244e1194 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.805665] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194e82e7-d5a8-4d73-b1c2-3d9151868541 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.813849] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26932c9b-051c-42a6-8111-34aed2a6ff91 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.846919] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151f7e10-c8e3-4832-acca-825c1b5d3af2 {{(pid=62507) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.855605] env[62507]: DEBUG oslo_vmware.api [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Task: {'id': task-2459996, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07539} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.855807] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9a0c6b0b-58c5-426d-8f0c-e060e2ac2825 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.857498] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1030.857688] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1030.857864] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1030.858083] env[62507]: INFO nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1030.860571] env[62507]: DEBUG nova.compute.claims [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1030.860755] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.860980] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.879802] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1030.931696] env[62507]: DEBUG oslo_vmware.rw_handles [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2c02c06a-ecfd-4515-afc0-1a681e77e6d5/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1030.991982] env[62507]: DEBUG oslo_vmware.rw_handles [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1030.992176] env[62507]: DEBUG oslo_vmware.rw_handles [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2c02c06a-ecfd-4515-afc0-1a681e77e6d5/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1031.212695] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6ac920-1da9-449b-b87d-0517a6c6fd86 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.220726] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614c34fc-554a-41e7-9f9c-b33bcfb5d43d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.250960] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191d82d6-f347-4c59-839a-df0ec04075f4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.257974] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce80d6b-a6d2-484d-bc7a-a4dfa1e66fed {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.270678] env[62507]: DEBUG nova.compute.provider_tree [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.279621] env[62507]: DEBUG nova.scheduler.client.report [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1031.294059] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.433s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.294588] env[62507]: ERROR nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1031.294588] env[62507]: Faults: ['InvalidArgument'] [ 1031.294588] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Traceback (most recent call last): [ 1031.294588] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1031.294588] 
env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] self.driver.spawn(context, instance, image_meta, [ 1031.294588] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1031.294588] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1031.294588] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1031.294588] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] self._fetch_image_if_missing(context, vi) [ 1031.294588] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1031.294588] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] image_cache(vi, tmp_image_ds_loc) [ 1031.294588] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1031.294919] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] vm_util.copy_virtual_disk( [ 1031.294919] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1031.294919] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] session._wait_for_task(vmdk_copy_task) [ 1031.294919] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1031.294919] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] return self.wait_for_task(task_ref) [ 1031.294919] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1031.294919] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] return evt.wait() [ 1031.294919] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1031.294919] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] result = hub.switch() [ 1031.294919] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1031.294919] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] return self.greenlet.switch() [ 1031.294919] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1031.294919] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] self.f(*self.args, **self.kw) [ 1031.295262] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1031.295262] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] raise exceptions.translate_fault(task_info.error) [ 1031.295262] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1031.295262] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Faults: ['InvalidArgument'] [ 1031.295262] env[62507]: ERROR nova.compute.manager [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] [ 1031.295399] env[62507]: DEBUG nova.compute.utils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1031.296740] env[62507]: DEBUG nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Build of instance dc241495-c9b7-4f2f-895d-e25008cc738a was re-scheduled: A specified parameter was not correct: fileType [ 1031.296740] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1031.297144] env[62507]: DEBUG nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1031.297330] env[62507]: DEBUG nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1031.297489] env[62507]: DEBUG nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1031.297653] env[62507]: DEBUG nova.network.neutron [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1031.667699] env[62507]: DEBUG nova.network.neutron [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.683005] env[62507]: INFO nova.compute.manager [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Took 0.39 seconds to deallocate network for instance. [ 1031.769190] env[62507]: INFO nova.scheduler.client.report [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Deleted allocations for instance dc241495-c9b7-4f2f-895d-e25008cc738a [ 1031.792402] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5c2cf616-c6a8-48bf-acf5-f21994fa81d9 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Lock "dc241495-c9b7-4f2f-895d-e25008cc738a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 469.538s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.793594] env[62507]: DEBUG oslo_concurrency.lockutils [None req-41ed31c4-a158-4d66-acaf-507dac36cc28 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Lock "dc241495-c9b7-4f2f-895d-e25008cc738a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 270.903s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.793811] env[62507]: DEBUG oslo_concurrency.lockutils [None req-41ed31c4-a158-4d66-acaf-507dac36cc28 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Acquiring lock "dc241495-c9b7-4f2f-895d-e25008cc738a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.794046] env[62507]: DEBUG oslo_concurrency.lockutils [None req-41ed31c4-a158-4d66-acaf-507dac36cc28 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Lock "dc241495-c9b7-4f2f-895d-e25008cc738a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.794261] env[62507]: DEBUG oslo_concurrency.lockutils [None req-41ed31c4-a158-4d66-acaf-507dac36cc28 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Lock "dc241495-c9b7-4f2f-895d-e25008cc738a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.796311] env[62507]: INFO nova.compute.manager [None req-41ed31c4-a158-4d66-acaf-507dac36cc28 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Terminating instance [ 1031.797994] env[62507]: DEBUG nova.compute.manager [None req-41ed31c4-a158-4d66-acaf-507dac36cc28 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1031.798219] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-41ed31c4-a158-4d66-acaf-507dac36cc28 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1031.798699] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16c380fb-536b-4ad3-ac9c-74b27ae45807 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.808096] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63b478c-d990-451e-95ef-6db3c6e89a59 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.818466] env[62507]: DEBUG nova.compute.manager [None req-c5e0a62f-8755-4709-9ddb-da7421e3e9fe tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 35a9caf1-5cb9-4d34-81ed-e064cfc73456] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1031.841479] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-41ed31c4-a158-4d66-acaf-507dac36cc28 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dc241495-c9b7-4f2f-895d-e25008cc738a could not be found. 
[ 1031.841695] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-41ed31c4-a158-4d66-acaf-507dac36cc28 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1031.841874] env[62507]: INFO nova.compute.manager [None req-41ed31c4-a158-4d66-acaf-507dac36cc28 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1031.842138] env[62507]: DEBUG oslo.service.loopingcall [None req-41ed31c4-a158-4d66-acaf-507dac36cc28 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1031.842395] env[62507]: DEBUG nova.compute.manager [-] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1031.842490] env[62507]: DEBUG nova.network.neutron [-] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1031.845475] env[62507]: DEBUG nova.compute.manager [None req-c5e0a62f-8755-4709-9ddb-da7421e3e9fe tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 35a9caf1-5cb9-4d34-81ed-e064cfc73456] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1031.868307] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c5e0a62f-8755-4709-9ddb-da7421e3e9fe tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "35a9caf1-5cb9-4d34-81ed-e064cfc73456" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.507s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.876020] env[62507]: DEBUG nova.network.neutron [-] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.880828] env[62507]: DEBUG nova.compute.manager [None req-ee0d6ce3-8177-419f-b03e-3833dacab755 tempest-ServerRescueNegativeTestJSON-1387522986 tempest-ServerRescueNegativeTestJSON-1387522986-project-member] [instance: 140c1da2-016b-45da-8134-90e1d51b81e5] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1031.884352] env[62507]: INFO nova.compute.manager [-] [instance: dc241495-c9b7-4f2f-895d-e25008cc738a] Took 0.04 seconds to deallocate network for instance. [ 1031.909561] env[62507]: DEBUG nova.compute.manager [None req-ee0d6ce3-8177-419f-b03e-3833dacab755 tempest-ServerRescueNegativeTestJSON-1387522986 tempest-ServerRescueNegativeTestJSON-1387522986-project-member] [instance: 140c1da2-016b-45da-8134-90e1d51b81e5] Instance disappeared before build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1031.933762] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee0d6ce3-8177-419f-b03e-3833dacab755 tempest-ServerRescueNegativeTestJSON-1387522986 tempest-ServerRescueNegativeTestJSON-1387522986-project-member] Lock "140c1da2-016b-45da-8134-90e1d51b81e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.883s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.945098] env[62507]: DEBUG nova.compute.manager [None req-464e09f8-b0df-4663-9c3e-41f2fde84eb1 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: f75f4ffa-8494-4edf-803f-9fe61b4899b5] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1031.971188] env[62507]: DEBUG nova.compute.manager [None req-464e09f8-b0df-4663-9c3e-41f2fde84eb1 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: f75f4ffa-8494-4edf-803f-9fe61b4899b5] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1031.990235] env[62507]: DEBUG oslo_concurrency.lockutils [None req-41ed31c4-a158-4d66-acaf-507dac36cc28 tempest-ServerExternalEventsTest-1683589731 tempest-ServerExternalEventsTest-1683589731-project-member] Lock "dc241495-c9b7-4f2f-895d-e25008cc738a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.998441] env[62507]: DEBUG oslo_concurrency.lockutils [None req-464e09f8-b0df-4663-9c3e-41f2fde84eb1 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "f75f4ffa-8494-4edf-803f-9fe61b4899b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.436s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.008692] env[62507]: DEBUG nova.compute.manager [None req-105792b1-c35c-49c4-8b6d-f0b7c2f2e029 tempest-ServerActionsTestOtherB-1636251776 tempest-ServerActionsTestOtherB-1636251776-project-member] [instance: c07010ad-0831-4b46-80ca-4532eb3dac7a] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1032.034319] env[62507]: DEBUG nova.compute.manager [None req-105792b1-c35c-49c4-8b6d-f0b7c2f2e029 tempest-ServerActionsTestOtherB-1636251776 tempest-ServerActionsTestOtherB-1636251776-project-member] [instance: c07010ad-0831-4b46-80ca-4532eb3dac7a] Instance disappeared before build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1032.052547] env[62507]: DEBUG oslo_concurrency.lockutils [None req-105792b1-c35c-49c4-8b6d-f0b7c2f2e029 tempest-ServerActionsTestOtherB-1636251776 tempest-ServerActionsTestOtherB-1636251776-project-member] Lock "c07010ad-0831-4b46-80ca-4532eb3dac7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.134s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.060124] env[62507]: DEBUG nova.compute.manager [None req-b1634efe-e42d-4af9-abe3-ab5fb821e8ef tempest-ServerRescueNegativeTestJSON-1387522986 tempest-ServerRescueNegativeTestJSON-1387522986-project-member] [instance: 92a87da8-3ed1-4d74-9a6e-abb35d69d9ad] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1032.081795] env[62507]: DEBUG nova.compute.manager [None req-b1634efe-e42d-4af9-abe3-ab5fb821e8ef tempest-ServerRescueNegativeTestJSON-1387522986 tempest-ServerRescueNegativeTestJSON-1387522986-project-member] [instance: 92a87da8-3ed1-4d74-9a6e-abb35d69d9ad] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1032.100870] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b1634efe-e42d-4af9-abe3-ab5fb821e8ef tempest-ServerRescueNegativeTestJSON-1387522986 tempest-ServerRescueNegativeTestJSON-1387522986-project-member] Lock "92a87da8-3ed1-4d74-9a6e-abb35d69d9ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.608s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.110403] env[62507]: DEBUG nova.compute.manager [None req-8f10a1b7-343d-4deb-8359-71b91b4ae960 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] [instance: 8e909a0f-02f7-405b-8a4a-bcf555db245d] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1032.131791] env[62507]: DEBUG nova.compute.manager [None req-8f10a1b7-343d-4deb-8359-71b91b4ae960 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] [instance: 8e909a0f-02f7-405b-8a4a-bcf555db245d] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1032.150669] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8f10a1b7-343d-4deb-8359-71b91b4ae960 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] Lock "8e909a0f-02f7-405b-8a4a-bcf555db245d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.933s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.158127] env[62507]: DEBUG nova.compute.manager [None req-564b7422-5e0a-4690-9a9d-dd7010e3fed4 tempest-ServersAaction247Test-1837452585 tempest-ServersAaction247Test-1837452585-project-member] [instance: 3ff410eb-b7f7-4735-b20f-b6f4a59bfc8c] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1032.181698] env[62507]: DEBUG nova.compute.manager [None req-564b7422-5e0a-4690-9a9d-dd7010e3fed4 tempest-ServersAaction247Test-1837452585 tempest-ServersAaction247Test-1837452585-project-member] [instance: 3ff410eb-b7f7-4735-b20f-b6f4a59bfc8c] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1032.201373] env[62507]: DEBUG oslo_concurrency.lockutils [None req-564b7422-5e0a-4690-9a9d-dd7010e3fed4 tempest-ServersAaction247Test-1837452585 tempest-ServersAaction247Test-1837452585-project-member] Lock "3ff410eb-b7f7-4735-b20f-b6f4a59bfc8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.988s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.210073] env[62507]: DEBUG nova.compute.manager [None req-257ab6f6-ec26-40b3-827d-bd9f4eedcd99 tempest-ServersNegativeTestMultiTenantJSON-288137237 tempest-ServersNegativeTestMultiTenantJSON-288137237-project-member] [instance: 31ed600c-d84c-4595-aceb-38f5d4e5aaff] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1032.232837] env[62507]: DEBUG nova.compute.manager [None req-257ab6f6-ec26-40b3-827d-bd9f4eedcd99 tempest-ServersNegativeTestMultiTenantJSON-288137237 tempest-ServersNegativeTestMultiTenantJSON-288137237-project-member] [instance: 31ed600c-d84c-4595-aceb-38f5d4e5aaff] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1032.252585] env[62507]: DEBUG oslo_concurrency.lockutils [None req-257ab6f6-ec26-40b3-827d-bd9f4eedcd99 tempest-ServersNegativeTestMultiTenantJSON-288137237 tempest-ServersNegativeTestMultiTenantJSON-288137237-project-member] Lock "31ed600c-d84c-4595-aceb-38f5d4e5aaff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.792s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.261403] env[62507]: DEBUG nova.compute.manager [None req-98a06325-6868-4eb7-9e7d-4b618a321e21 tempest-AttachVolumeTestJSON-1962259627 tempest-AttachVolumeTestJSON-1962259627-project-member] [instance: 36c7ffe9-6a5f-4758-b1e8-36c0330d9a23] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1032.283600] env[62507]: DEBUG nova.compute.manager [None req-98a06325-6868-4eb7-9e7d-4b618a321e21 tempest-AttachVolumeTestJSON-1962259627 tempest-AttachVolumeTestJSON-1962259627-project-member] [instance: 36c7ffe9-6a5f-4758-b1e8-36c0330d9a23] Instance disappeared before build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1032.308522] env[62507]: DEBUG oslo_concurrency.lockutils [None req-98a06325-6868-4eb7-9e7d-4b618a321e21 tempest-AttachVolumeTestJSON-1962259627 tempest-AttachVolumeTestJSON-1962259627-project-member] Lock "36c7ffe9-6a5f-4758-b1e8-36c0330d9a23" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.067s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.316636] env[62507]: DEBUG nova.compute.manager [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1032.366145] env[62507]: DEBUG oslo_concurrency.lockutils [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.366434] env[62507]: DEBUG oslo_concurrency.lockutils [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.368095] env[62507]: INFO nova.compute.claims [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.698439] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9338a4a1-311c-4f14-82a0-73b966bbda72 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.706149] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0410b3d4-d3cf-40cb-805d-de8c3cde8799 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.736964] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9820828-f549-4a12-8b5e-914312897417 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.744620] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062cfaec-4045-43c0-a559-7448336a5e2e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.758678] env[62507]: DEBUG nova.compute.provider_tree [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.766176] env[62507]: DEBUG nova.scheduler.client.report [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1032.781163] env[62507]: DEBUG oslo_concurrency.lockutils [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.414s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.781163] env[62507]: DEBUG nova.compute.manager [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1032.818140] env[62507]: DEBUG nova.compute.utils [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1032.819480] env[62507]: DEBUG nova.compute.manager [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1032.819653] env[62507]: DEBUG nova.network.neutron [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1032.829734] env[62507]: DEBUG nova.compute.manager [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Start building block device mappings for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1032.868216] env[62507]: INFO nova.virt.block_device [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Booting with volume 3d365cd2-36ad-430c-b85c-eca284e00e28 at /dev/sda [ 1032.886992] env[62507]: DEBUG nova.policy [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40eeb64f47ae49fda72aa5aec382d982', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdd3bff4958346c19a8ce90fdb044a18', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1032.915749] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e05c280-1575-429a-b7f8-cda539b4d372 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.924070] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f468c51-d743-419d-89c4-75788a86461b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.952039] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-299f076d-749a-4a1a-a9ed-37e07f398461 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.959464] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448e0084-bfed-40e2-96e2-b431abe8d0ba {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.987153] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a483187-b8ef-4550-8c26-cf14760d2693 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.993372] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2bfd4d-0140-44fe-9082-f61090ecf927 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.008211] env[62507]: DEBUG nova.virt.block_device [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Updating existing volume attachment record: 5f3e2b44-38bb-45e2-b6b2-37053e17e07f {{(pid=62507) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 1033.219985] env[62507]: DEBUG nova.compute.manager [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1033.220662] env[62507]: DEBUG nova.virt.hardware [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1033.220914] env[62507]: DEBUG nova.virt.hardware [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1033.221085] env[62507]: DEBUG nova.virt.hardware [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.221275] env[62507]: DEBUG nova.virt.hardware [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1033.221421] env[62507]: DEBUG nova.virt.hardware [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.221567] env[62507]: DEBUG nova.virt.hardware [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1033.221767] env[62507]: DEBUG nova.virt.hardware [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1033.221924] env[62507]: DEBUG nova.virt.hardware [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1033.222106] env[62507]: DEBUG nova.virt.hardware [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 
tempest-ServersTestBootFromVolume-1967178980-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1033.222273] env[62507]: DEBUG nova.virt.hardware [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1033.222450] env[62507]: DEBUG nova.virt.hardware [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1033.223584] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e919e8d-15d0-4e82-b500-f38876c01a30 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.233252] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315e8e7a-8d5e-40f1-8416-c603cace9d1c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.261014] env[62507]: DEBUG nova.network.neutron [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Successfully created port: 1df9ee9a-286d-4a99-87db-f8e975007555 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1033.932425] env[62507]: DEBUG nova.compute.manager [req-d83315db-7046-4bde-ae65-4b8c02cc1bbf req-e42e6659-2dec-4b45-858b-1ace0966e25d service nova] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Received event network-vif-plugged-1df9ee9a-286d-4a99-87db-f8e975007555 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1033.932714] env[62507]: DEBUG oslo_concurrency.lockutils [req-d83315db-7046-4bde-ae65-4b8c02cc1bbf req-e42e6659-2dec-4b45-858b-1ace0966e25d service nova] Acquiring lock "2fea54d3-0637-4811-9ff3-1a72bc4e08ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.932848] env[62507]: DEBUG oslo_concurrency.lockutils [req-d83315db-7046-4bde-ae65-4b8c02cc1bbf req-e42e6659-2dec-4b45-858b-1ace0966e25d service nova] Lock "2fea54d3-0637-4811-9ff3-1a72bc4e08ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.933069] env[62507]: DEBUG oslo_concurrency.lockutils [req-d83315db-7046-4bde-ae65-4b8c02cc1bbf req-e42e6659-2dec-4b45-858b-1ace0966e25d service nova] Lock "2fea54d3-0637-4811-9ff3-1a72bc4e08ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.933195] env[62507]: DEBUG nova.compute.manager [req-d83315db-7046-4bde-ae65-4b8c02cc1bbf 
req-e42e6659-2dec-4b45-858b-1ace0966e25d service nova] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] No waiting events found dispatching network-vif-plugged-1df9ee9a-286d-4a99-87db-f8e975007555 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1033.933359] env[62507]: WARNING nova.compute.manager [req-d83315db-7046-4bde-ae65-4b8c02cc1bbf req-e42e6659-2dec-4b45-858b-1ace0966e25d service nova] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Received unexpected event network-vif-plugged-1df9ee9a-286d-4a99-87db-f8e975007555 for instance with vm_state building and task_state spawning. [ 1034.044976] env[62507]: DEBUG nova.network.neutron [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Successfully updated port: 1df9ee9a-286d-4a99-87db-f8e975007555 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1034.056064] env[62507]: DEBUG oslo_concurrency.lockutils [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Acquiring lock "refresh_cache-2fea54d3-0637-4811-9ff3-1a72bc4e08ec" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.056219] env[62507]: DEBUG oslo_concurrency.lockutils [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Acquired lock "refresh_cache-2fea54d3-0637-4811-9ff3-1a72bc4e08ec" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.056391] env[62507]: DEBUG nova.network.neutron [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1034.105209] env[62507]: DEBUG nova.network.neutron [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1034.334342] env[62507]: DEBUG nova.network.neutron [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Updating instance_info_cache with network_info: [{"id": "1df9ee9a-286d-4a99-87db-f8e975007555", "address": "fa:16:3e:5f:ea:df", "network": {"id": "e209fdf6-9c8e-4224-ada0-79d2d251c5f0", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-156044785-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdd3bff4958346c19a8ce90fdb044a18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "674802e7-b847-4bef-a7a8-f90ac7a3a0a7", "external-id": "nsx-vlan-transportzone-953", "segmentation_id": 953, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1df9ee9a-28", "ovs_interfaceid": "1df9ee9a-286d-4a99-87db-f8e975007555", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.349364] env[62507]: DEBUG oslo_concurrency.lockutils [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Releasing lock "refresh_cache-2fea54d3-0637-4811-9ff3-1a72bc4e08ec" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.349719] env[62507]: DEBUG nova.compute.manager [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Instance network_info: |[{"id": "1df9ee9a-286d-4a99-87db-f8e975007555", "address": "fa:16:3e:5f:ea:df", "network": {"id": "e209fdf6-9c8e-4224-ada0-79d2d251c5f0", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-156044785-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdd3bff4958346c19a8ce90fdb044a18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "674802e7-b847-4bef-a7a8-f90ac7a3a0a7", "external-id": "nsx-vlan-transportzone-953", "segmentation_id": 953, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1df9ee9a-28", "ovs_interfaceid": "1df9ee9a-286d-4a99-87db-f8e975007555", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1034.350152] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:ea:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '674802e7-b847-4bef-a7a8-f90ac7a3a0a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1df9ee9a-286d-4a99-87db-f8e975007555', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1034.357660] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Creating folder: Project (cdd3bff4958346c19a8ce90fdb044a18). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1034.358193] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab184349-0c43-4d23-90b7-9b014703d2d6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.371154] env[62507]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1034.371353] env[62507]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62507) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1034.371701] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Folder already exists: Project (cdd3bff4958346c19a8ce90fdb044a18). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1034.371954] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Creating folder: Instances. Parent ref: group-v498039. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1034.372224] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0bbaffb-b647-4a6f-8532-381f09bd5066 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.380664] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Created folder: Instances in parent group-v498039. [ 1034.380881] env[62507]: DEBUG oslo.service.loopingcall [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.381063] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1034.381252] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da42b7f5-7c8b-4b1b-a4a6-637e5a33f858 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.398949] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1034.398949] env[62507]: value = "task-2459999" [ 1034.398949] env[62507]: _type = "Task" [ 1034.398949] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.405868] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459999, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.908744] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2459999, 'name': CreateVM_Task, 'duration_secs': 0.294937} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.908929] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1034.909589] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-498042', 'volume_id': '3d365cd2-36ad-430c-b85c-eca284e00e28', 'name': 'volume-3d365cd2-36ad-430c-b85c-eca284e00e28', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2fea54d3-0637-4811-9ff3-1a72bc4e08ec', 'attached_at': '', 'detached_at': '', 'volume_id': '3d365cd2-36ad-430c-b85c-eca284e00e28', 'serial': '3d365cd2-36ad-430c-b85c-eca284e00e28'}, 'boot_index': 0, 'guest_format': None, 'mount_device': '/dev/sda', 'attachment_id': '5f3e2b44-38bb-45e2-b6b2-37053e17e07f', 'device_type': None, 'disk_bus': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=62507) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1034.909817] env[62507]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Root volume attach. 
Driver type: vmdk {{(pid=62507) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1034.910582] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432e52cd-1fc2-4593-8b5a-07f57ff4d055 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.918333] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2e7cb4-e7b5-4507-8df2-de5597f8ed48 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.923870] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e2b08e-95d7-46f3-8f3c-23693b849510 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.929865] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-568069aa-a09f-49a2-8d09-986153fe1529 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.938271] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Waiting for the task: (returnval){ [ 1034.938271] env[62507]: value = "task-2460000" [ 1034.938271] env[62507]: _type = "Task" [ 1034.938271] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.945357] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460000, 'name': RelocateVM_Task} progress is 5%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.450123] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460000, 'name': RelocateVM_Task, 'duration_secs': 0.025063} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.450404] env[62507]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Volume attach. 
Driver type: vmdk {{(pid=62507) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1035.450607] env[62507]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-498042', 'volume_id': '3d365cd2-36ad-430c-b85c-eca284e00e28', 'name': 'volume-3d365cd2-36ad-430c-b85c-eca284e00e28', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2fea54d3-0637-4811-9ff3-1a72bc4e08ec', 'attached_at': '', 'detached_at': '', 'volume_id': '3d365cd2-36ad-430c-b85c-eca284e00e28', 'serial': '3d365cd2-36ad-430c-b85c-eca284e00e28'} {{(pid=62507) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1035.451363] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09d2b88-c32f-4e88-ae37-659702b10c80 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.467489] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e23517a-301a-42ae-8fb0-33eb7b4424b8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.488872] env[62507]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] volume-3d365cd2-36ad-430c-b85c-eca284e00e28/volume-3d365cd2-36ad-430c-b85c-eca284e00e28.vmdk or device None with type thin {{(pid=62507) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1035.489108] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6fe48df4-2494-4253-a6bf-aa5c384538a3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.507776] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Waiting for the task: (returnval){ [ 1035.507776] env[62507]: value = "task-2460001" [ 1035.507776] env[62507]: _type = "Task" [ 1035.507776] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.514832] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460001, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.974922] env[62507]: DEBUG nova.compute.manager [req-e1d4fb9d-7244-4af4-b92b-3cc4609f6a46 req-f674f4c2-37a1-43cb-95ba-04a176c6b85e service nova] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Received event network-changed-1df9ee9a-286d-4a99-87db-f8e975007555 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1035.975203] env[62507]: DEBUG nova.compute.manager [req-e1d4fb9d-7244-4af4-b92b-3cc4609f6a46 req-f674f4c2-37a1-43cb-95ba-04a176c6b85e service nova] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Refreshing instance network info cache due to event network-changed-1df9ee9a-286d-4a99-87db-f8e975007555. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1035.975392] env[62507]: DEBUG oslo_concurrency.lockutils [req-e1d4fb9d-7244-4af4-b92b-3cc4609f6a46 req-f674f4c2-37a1-43cb-95ba-04a176c6b85e service nova] Acquiring lock "refresh_cache-2fea54d3-0637-4811-9ff3-1a72bc4e08ec" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.975520] env[62507]: DEBUG oslo_concurrency.lockutils [req-e1d4fb9d-7244-4af4-b92b-3cc4609f6a46 req-f674f4c2-37a1-43cb-95ba-04a176c6b85e service nova] Acquired lock "refresh_cache-2fea54d3-0637-4811-9ff3-1a72bc4e08ec" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.975658] env[62507]: DEBUG nova.network.neutron [req-e1d4fb9d-7244-4af4-b92b-3cc4609f6a46 req-f674f4c2-37a1-43cb-95ba-04a176c6b85e service nova] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Refreshing network info cache for port 1df9ee9a-286d-4a99-87db-f8e975007555 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1036.020382] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460001, 'name': ReconfigVM_Task, 'duration_secs': 0.251813} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.020959] env[62507]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Reconfigured VM instance instance-0000002e to attach disk [datastore2] volume-3d365cd2-36ad-430c-b85c-eca284e00e28/volume-3d365cd2-36ad-430c-b85c-eca284e00e28.vmdk or device None with type thin {{(pid=62507) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1036.026233] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eafe6e8e-8eb8-448f-9bc6-17cceb477ef8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.040397] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Waiting for the task: (returnval){ [ 1036.040397] env[62507]: value = "task-2460002" [ 1036.040397] env[62507]: _type = "Task" [ 1036.040397] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.049728] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460002, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.453067] env[62507]: DEBUG nova.network.neutron [req-e1d4fb9d-7244-4af4-b92b-3cc4609f6a46 req-f674f4c2-37a1-43cb-95ba-04a176c6b85e service nova] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Updated VIF entry in instance network info cache for port 1df9ee9a-286d-4a99-87db-f8e975007555. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1036.453434] env[62507]: DEBUG nova.network.neutron [req-e1d4fb9d-7244-4af4-b92b-3cc4609f6a46 req-f674f4c2-37a1-43cb-95ba-04a176c6b85e service nova] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Updating instance_info_cache with network_info: [{"id": "1df9ee9a-286d-4a99-87db-f8e975007555", "address": "fa:16:3e:5f:ea:df", "network": {"id": "e209fdf6-9c8e-4224-ada0-79d2d251c5f0", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-156044785-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdd3bff4958346c19a8ce90fdb044a18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "674802e7-b847-4bef-a7a8-f90ac7a3a0a7", "external-id": "nsx-vlan-transportzone-953", "segmentation_id": 953, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1df9ee9a-28", "ovs_interfaceid": "1df9ee9a-286d-4a99-87db-f8e975007555", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.462948] env[62507]: DEBUG oslo_concurrency.lockutils [req-e1d4fb9d-7244-4af4-b92b-3cc4609f6a46 req-f674f4c2-37a1-43cb-95ba-04a176c6b85e service nova] Releasing lock "refresh_cache-2fea54d3-0637-4811-9ff3-1a72bc4e08ec" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.549369] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460002, 'name': ReconfigVM_Task, 'duration_secs': 0.1122} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.549657] env[62507]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-498042', 'volume_id': '3d365cd2-36ad-430c-b85c-eca284e00e28', 'name': 'volume-3d365cd2-36ad-430c-b85c-eca284e00e28', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2fea54d3-0637-4811-9ff3-1a72bc4e08ec', 'attached_at': '', 'detached_at': '', 'volume_id': '3d365cd2-36ad-430c-b85c-eca284e00e28', 'serial': '3d365cd2-36ad-430c-b85c-eca284e00e28'} {{(pid=62507) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1036.550249] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10b87a56-9465-4df9-9258-cabb4d2b0b01 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.556226] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Waiting for the task: (returnval){ [ 1036.556226] env[62507]: value = "task-2460003" [ 1036.556226] env[62507]: _type = "Task" [ 1036.556226] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.563172] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460003, 'name': Rename_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.067667] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460003, 'name': Rename_Task, 'duration_secs': 0.112961} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.067950] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Powering on the VM {{(pid=62507) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1037.068204] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ab9d475-55f0-4721-896d-639583e50efa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.074717] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Waiting for the task: (returnval){ [ 1037.074717] env[62507]: value = "task-2460004" [ 1037.074717] env[62507]: _type = "Task" [ 1037.074717] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.082259] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460004, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.583944] env[62507]: DEBUG oslo_vmware.api [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460004, 'name': PowerOnVM_Task, 'duration_secs': 0.420127} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.584207] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Powered on the VM {{(pid=62507) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1037.584423] env[62507]: INFO nova.compute.manager [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Took 4.36 seconds to spawn the instance on the hypervisor. [ 1037.584680] env[62507]: DEBUG nova.compute.manager [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Checking state {{(pid=62507) _get_power_state /opt/stack/nova/nova/compute/manager.py:1782}} [ 1037.585435] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa4ccaf-185e-4f65-82bb-2bd3f9f26091 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.640279] env[62507]: INFO nova.compute.manager [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Took 5.29 seconds to build instance. [ 1037.654013] env[62507]: DEBUG oslo_concurrency.lockutils [None req-bc60bb0c-ffef-4d5d-b8b3-4ba0740d7152 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Lock "2fea54d3-0637-4811-9ff3-1a72bc4e08ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 194.535s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.663025] env[62507]: DEBUG nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1037.714802] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.715067] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.716660] env[62507]: INFO nova.compute.claims [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1038.018568] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a4379b-0e49-4e05-a903-29048e067fe9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.026672] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66862f66-e70f-4584-98d2-0633d5fcd51d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.056633] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbd33e1-0071-427d-8bc6-d75ea5064a0f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.064550] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e7a2c3-8195-49f8-a68f-49012101316c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.078050] env[62507]: DEBUG nova.compute.provider_tree [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.087426] env[62507]: DEBUG nova.scheduler.client.report [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1038.103095] 
env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.388s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.103568] env[62507]: DEBUG nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1038.135867] env[62507]: DEBUG nova.compute.utils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1038.137428] env[62507]: DEBUG nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1038.137605] env[62507]: DEBUG nova.network.neutron [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1038.148147] env[62507]: DEBUG nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1038.209939] env[62507]: DEBUG nova.policy [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0389e5517d8840fab73ed22fea7d5dc4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1bda6c3eb5840aba9355b2ac4a3188c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1038.214035] env[62507]: DEBUG nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1038.237896] env[62507]: DEBUG nova.virt.hardware [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1038.238168] env[62507]: DEBUG nova.virt.hardware [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1038.238336] env[62507]: DEBUG nova.virt.hardware [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1038.239029] env[62507]: DEBUG nova.virt.hardware [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1038.239029] env[62507]: DEBUG nova.virt.hardware [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1038.239029] env[62507]: DEBUG nova.virt.hardware [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1038.239269] env[62507]: DEBUG nova.virt.hardware [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1038.239269] env[62507]: DEBUG nova.virt.hardware [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1038.239406] env[62507]: DEBUG nova.virt.hardware [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1038.239564] env[62507]: DEBUG nova.virt.hardware [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1038.239735] env[62507]: DEBUG nova.virt.hardware [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1038.240677] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e3cb5e-a9ab-44bb-b245-9cbcf3cb6bd5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.248716] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4fec10-9dd8-44da-a2d5-1cddc18ba63d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.547498] env[62507]: DEBUG nova.network.neutron [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Successfully created port: 37c39c64-1bc3-4bba-90c9-389017992a35 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1038.826521] env[62507]: DEBUG oslo_concurrency.lockutils [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Acquiring lock "2fea54d3-0637-4811-9ff3-1a72bc4e08ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.826762] env[62507]: DEBUG oslo_concurrency.lockutils [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Lock "2fea54d3-0637-4811-9ff3-1a72bc4e08ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.826963] env[62507]: DEBUG oslo_concurrency.lockutils [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Acquiring lock "2fea54d3-0637-4811-9ff3-1a72bc4e08ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.827157] env[62507]: DEBUG oslo_concurrency.lockutils [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 
tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Lock "2fea54d3-0637-4811-9ff3-1a72bc4e08ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.827392] env[62507]: DEBUG oslo_concurrency.lockutils [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Lock "2fea54d3-0637-4811-9ff3-1a72bc4e08ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.832457] env[62507]: INFO nova.compute.manager [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Terminating instance [ 1038.834521] env[62507]: DEBUG nova.compute.manager [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1038.834734] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Powering off the VM {{(pid=62507) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1038.834957] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf6c8fc9-0e42-4122-a19a-f9a9cced94e0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.847635] env[62507]: DEBUG oslo_vmware.api [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Waiting for the task: (returnval){ [ 1038.847635] env[62507]: value = "task-2460005" [ 1038.847635] env[62507]: _type = "Task" [ 1038.847635] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.861030] env[62507]: DEBUG oslo_vmware.api [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460005, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.359197] env[62507]: DEBUG oslo_vmware.api [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460005, 'name': PowerOffVM_Task, 'duration_secs': 0.152387} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.359476] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Powered off the VM {{(pid=62507) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1039.359721] env[62507]: DEBUG nova.virt.vmwareapi.volumeops [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Volume detach. Driver type: vmdk {{(pid=62507) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1039.359865] env[62507]: DEBUG nova.virt.vmwareapi.volumeops [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-498042', 'volume_id': '3d365cd2-36ad-430c-b85c-eca284e00e28', 'name': 'volume-3d365cd2-36ad-430c-b85c-eca284e00e28', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2fea54d3-0637-4811-9ff3-1a72bc4e08ec', 'attached_at': '', 'detached_at': '', 'volume_id': '3d365cd2-36ad-430c-b85c-eca284e00e28', 'serial': '3d365cd2-36ad-430c-b85c-eca284e00e28'} {{(pid=62507) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1039.360679] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc1d60e-d2b8-4866-bd45-b755a29d4c1f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.382797] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925d5e05-5043-4a30-b414-a80d7a6e80b7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.390027] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbe3959-c830-4302-bbad-8a82401f7f88 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.407826] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c2b0e8-436d-4bbf-8447-f29b7f0fbd12 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.422495] env[62507]: DEBUG nova.virt.vmwareapi.volumeops [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] The volume has not been displaced from its original location: [datastore2] volume-3d365cd2-36ad-430c-b85c-eca284e00e28/volume-3d365cd2-36ad-430c-b85c-eca284e00e28.vmdk. No consolidation needed. 
{{(pid=62507) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1039.428464] env[62507]: DEBUG nova.virt.vmwareapi.volumeops [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Reconfiguring VM instance instance-0000002e to detach disk 2000 {{(pid=62507) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1039.428751] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edf9d2b1-ac15-46cd-b368-3a81d93ded56 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.446880] env[62507]: DEBUG oslo_vmware.api [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Waiting for the task: (returnval){ [ 1039.446880] env[62507]: value = "task-2460006" [ 1039.446880] env[62507]: _type = "Task" [ 1039.446880] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.457499] env[62507]: DEBUG oslo_vmware.api [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460006, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.755063] env[62507]: DEBUG nova.compute.manager [req-cc839795-c812-49f8-b278-49533b604ba0 req-581cf1e5-bb59-49c8-a32d-267a82833dbe service nova] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Received event network-vif-plugged-37c39c64-1bc3-4bba-90c9-389017992a35 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1039.755282] env[62507]: DEBUG oslo_concurrency.lockutils [req-cc839795-c812-49f8-b278-49533b604ba0 req-581cf1e5-bb59-49c8-a32d-267a82833dbe service nova] Acquiring lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.755485] env[62507]: DEBUG oslo_concurrency.lockutils [req-cc839795-c812-49f8-b278-49533b604ba0 req-581cf1e5-bb59-49c8-a32d-267a82833dbe service nova] Lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.755649] env[62507]: DEBUG oslo_concurrency.lockutils [req-cc839795-c812-49f8-b278-49533b604ba0 req-581cf1e5-bb59-49c8-a32d-267a82833dbe service nova] Lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.755809] env[62507]: DEBUG nova.compute.manager [req-cc839795-c812-49f8-b278-49533b604ba0 req-581cf1e5-bb59-49c8-a32d-267a82833dbe service nova] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] No waiting events found dispatching network-vif-plugged-37c39c64-1bc3-4bba-90c9-389017992a35 {{(pid=62507) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1039.755965] env[62507]: WARNING nova.compute.manager [req-cc839795-c812-49f8-b278-49533b604ba0 req-581cf1e5-bb59-49c8-a32d-267a82833dbe service nova] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Received unexpected event network-vif-plugged-37c39c64-1bc3-4bba-90c9-389017992a35 for instance with vm_state building and task_state spawning. [ 1039.957362] env[62507]: DEBUG oslo_vmware.api [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460006, 'name': ReconfigVM_Task, 'duration_secs': 0.149671} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.957668] env[62507]: DEBUG nova.virt.vmwareapi.volumeops [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Reconfigured VM instance instance-0000002e to detach disk 2000 {{(pid=62507) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1039.964402] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c12644ff-df70-4e3b-8558-ca4aa1e47ea9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.976171] env[62507]: DEBUG nova.network.neutron [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Successfully updated port: 37c39c64-1bc3-4bba-90c9-389017992a35 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1039.982810] env[62507]: DEBUG oslo_vmware.api [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Waiting for the task: (returnval){ [ 1039.982810] env[62507]: value = "task-2460007" [ 1039.982810] env[62507]: _type = "Task" [ 1039.982810] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.991627] env[62507]: DEBUG oslo_vmware.api [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460007, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.992686] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquiring lock "refresh_cache-7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.992828] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquired lock "refresh_cache-7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.992975] env[62507]: DEBUG nova.network.neutron [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1040.072067] env[62507]: DEBUG nova.network.neutron [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1040.187057] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1040.436822] env[62507]: DEBUG nova.network.neutron [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Updating instance_info_cache with network_info: [{"id": "37c39c64-1bc3-4bba-90c9-389017992a35", "address": "fa:16:3e:16:b3:de", "network": {"id": "010b6aab-a787-46cb-a41c-82df14c895a1", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1118985165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1bda6c3eb5840aba9355b2ac4a3188c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24727047-6358-4015-86c1-394ab07fb88f", "external-id": "nsx-vlan-transportzone-476", "segmentation_id": 476, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37c39c64-1b", "ovs_interfaceid": "37c39c64-1bc3-4bba-90c9-389017992a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.450129] 
env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Releasing lock "refresh_cache-7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.450434] env[62507]: DEBUG nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Instance network_info: |[{"id": "37c39c64-1bc3-4bba-90c9-389017992a35", "address": "fa:16:3e:16:b3:de", "network": {"id": "010b6aab-a787-46cb-a41c-82df14c895a1", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1118985165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1bda6c3eb5840aba9355b2ac4a3188c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24727047-6358-4015-86c1-394ab07fb88f", "external-id": "nsx-vlan-transportzone-476", "segmentation_id": 476, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37c39c64-1b", "ovs_interfaceid": "37c39c64-1bc3-4bba-90c9-389017992a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1040.450845] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:b3:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24727047-6358-4015-86c1-394ab07fb88f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '37c39c64-1bc3-4bba-90c9-389017992a35', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1040.458881] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Creating folder: Project (f1bda6c3eb5840aba9355b2ac4a3188c). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1040.459485] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-046bac77-d0c4-474c-bf71-5415e1c2cf92 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.471430] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Created folder: Project (f1bda6c3eb5840aba9355b2ac4a3188c) in parent group-v497991. 
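[editor's note] The folder-creation entries above and at timestamp 1034.37 show a recurring pattern: nova invokes Folder.CreateFolder, and when vCenter reports a DuplicateName fault (inside an HTTP 200 SOAP response, hence the suds WARNING), it falls back to treating the folder as already existing. The following is a minimal self-contained sketch of that idempotent create-or-reuse pattern; the session and fault classes are stand-ins invented for illustration, not nova's or oslo.vmware's actual API.

    # Hypothetical sketch of the create-folder flow seen in this log.
    # DuplicateName and FakeSession are stubs; real nova goes through
    # oslo.vmware's session.invoke_api and vSphere fault classes.

    class DuplicateName(Exception):
        """Stand-in for the vSphere DuplicateName fault."""

    class FakeSession:
        def __init__(self, existing):
            self._existing = existing  # folder name -> moref already present

        def create_folder(self, parent_ref, name):
            if name in self._existing:
                raise DuplicateName(name)
            self._existing[name] = "group-v%d" % (498000 + len(self._existing))
            return self._existing[name]

        def find_child(self, parent_ref, name):
            return self._existing[name]

    def ensure_folder(session, parent_ref, name):
        """Create a folder, or reuse the existing one on DuplicateName."""
        try:
            ref = session.create_folder(parent_ref, name)
            print("Created folder: %s in parent %s." % (name, parent_ref))
        except DuplicateName:
            # Matches the log's "Folder already exists" branch: look up the
            # child whose name collided instead of failing the build.
            ref = session.find_child(parent_ref, name)
            print("Folder already exists: %s. Parent ref: %s." % (name, parent_ref))
        return ref

    if __name__ == "__main__":
        s = FakeSession({"Project (cdd3bff4958346c19a8ce90fdb044a18)": "group-v498039"})
        project = ensure_folder(s, "group-v497991",
                                "Project (cdd3bff4958346c19a8ce90fdb044a18)")
        ensure_folder(s, project, "Instances")

Run directly, this reproduces the two branches visible in the log: the tenant Project folder collides and is reused, then an Instances folder is created beneath it.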
[ 1040.471430] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Creating folder: Instances. Parent ref: group-v498057. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1040.471430] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c48d4311-ebb6-4c4a-92b7-e3d01c5055c6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.483303] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Created folder: Instances in parent group-v498057. [ 1040.483303] env[62507]: DEBUG oslo.service.loopingcall [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1040.483725] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1040.487405] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47935276-0ebd-44e9-9b35-b403603b7760 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.505492] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquiring lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.510849] env[62507]: DEBUG oslo_vmware.api [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460007, 'name': ReconfigVM_Task, 'duration_secs': 0.104092} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.512190] env[62507]: DEBUG nova.virt.vmwareapi.volumeops [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-498042', 'volume_id': '3d365cd2-36ad-430c-b85c-eca284e00e28', 'name': 'volume-3d365cd2-36ad-430c-b85c-eca284e00e28', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2fea54d3-0637-4811-9ff3-1a72bc4e08ec', 'attached_at': '', 'detached_at': '', 'volume_id': '3d365cd2-36ad-430c-b85c-eca284e00e28', 'serial': '3d365cd2-36ad-430c-b85c-eca284e00e28'} {{(pid=62507) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1040.512394] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1040.512630] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1040.512630] env[62507]: value = "task-2460010" [ 1040.512630] env[62507]: _type = "Task" [ 1040.512630] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.513320] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7533c0e2-507f-4802-a285-c1f8ecd29a8f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.525415] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460010, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.527482] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1040.527723] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7fedacc1-642d-4a82-b96d-0c9b43dc5369 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.587040] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1040.587397] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1040.588125] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Deleting the datastore file [datastore2] 2fea54d3-0637-4811-9ff3-1a72bc4e08ec {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.588125] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ed1d674-8d94-40f6-8cb1-05185fe8089e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.594752] env[62507]: DEBUG oslo_vmware.api [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Waiting for the task: (returnval){ [ 1040.594752] env[62507]: value = "task-2460012" [ 1040.594752] env[62507]: _type = "Task" [ 1040.594752] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.603420] env[62507]: DEBUG oslo_vmware.api [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460012, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.026811] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460010, 'name': CreateVM_Task, 'duration_secs': 0.286454} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.026987] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1041.027817] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.028028] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.028378] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1041.028723] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c663068-b818-4db5-b977-132cd5dbe823 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.033580] env[62507]: DEBUG oslo_vmware.api [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Waiting for the task: (returnval){ [ 1041.033580] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529c9b43-3c6e-ac20-29d0-681b06b89c79" [ 1041.033580] env[62507]: _type = "Task" [ 1041.033580] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.041147] env[62507]: DEBUG oslo_vmware.api [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529c9b43-3c6e-ac20-29d0-681b06b89c79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.103447] env[62507]: DEBUG oslo_vmware.api [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Task: {'id': task-2460012, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079695} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.103695] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.103926] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1041.104226] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1041.104404] env[62507]: INFO nova.compute.manager [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Took 2.27 seconds to destroy the instance on the hypervisor. [ 1041.104647] env[62507]: DEBUG oslo.service.loopingcall [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1041.104826] env[62507]: DEBUG nova.compute.manager [-] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1041.105819] env[62507]: DEBUG nova.network.neutron [-] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1041.167750] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.440641] env[62507]: DEBUG nova.network.neutron [-] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.479692] env[62507]: INFO nova.compute.manager [-] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Took 0.37 seconds to deallocate network for instance. 
[ 1041.546363] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.546477] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1041.546747] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.572282] env[62507]: INFO nova.compute.manager [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Took 0.09 seconds to detach 1 volumes for instance. [ 1041.575025] env[62507]: DEBUG nova.compute.manager [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Deleting volume: 3d365cd2-36ad-430c-b85c-eca284e00e28 {{(pid=62507) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3238}} [ 1041.646821] env[62507]: DEBUG oslo_concurrency.lockutils [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.647088] env[62507]: DEBUG oslo_concurrency.lockutils [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.647401] env[62507]: DEBUG nova.objects.instance [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Lazy-loading 'resources' on Instance uuid 2fea54d3-0637-4811-9ff3-1a72bc4e08ec {{(pid=62507) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.790690] env[62507]: DEBUG nova.compute.manager [req-42428164-82c0-4ba8-9891-fbb75e46afab req-4bbf1dce-aad9-4bc2-9614-0ac7d09010bc service nova] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Received event network-changed-37c39c64-1bc3-4bba-90c9-389017992a35 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1041.790875] env[62507]: DEBUG nova.compute.manager [req-42428164-82c0-4ba8-9891-fbb75e46afab req-4bbf1dce-aad9-4bc2-9614-0ac7d09010bc service nova] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Refreshing instance network info cache due to event network-changed-37c39c64-1bc3-4bba-90c9-389017992a35. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1041.791102] env[62507]: DEBUG oslo_concurrency.lockutils [req-42428164-82c0-4ba8-9891-fbb75e46afab req-4bbf1dce-aad9-4bc2-9614-0ac7d09010bc service nova] Acquiring lock "refresh_cache-7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.791456] env[62507]: DEBUG oslo_concurrency.lockutils [req-42428164-82c0-4ba8-9891-fbb75e46afab req-4bbf1dce-aad9-4bc2-9614-0ac7d09010bc service nova] Acquired lock "refresh_cache-7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.791456] env[62507]: DEBUG nova.network.neutron [req-42428164-82c0-4ba8-9891-fbb75e46afab req-4bbf1dce-aad9-4bc2-9614-0ac7d09010bc service nova] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Refreshing network info cache for port 37c39c64-1bc3-4bba-90c9-389017992a35 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1041.986728] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7363e46-d764-4eb1-ae08-2442a1c31ee0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.994963] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f138fb6a-d4f7-4e25-8706-9099d60f19fc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.033134] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5620bc-b8eb-4245-a130-420b5534dbae {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.041019] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f905671-b8ee-468c-9557-001be18973f4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.057737] env[62507]: DEBUG nova.compute.provider_tree [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.069021] env[62507]: DEBUG nova.scheduler.client.report [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1042.089112] env[62507]: DEBUG oslo_concurrency.lockutils [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.442s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.111816] env[62507]: INFO nova.scheduler.client.report [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Deleted allocations for instance 2fea54d3-0637-4811-9ff3-1a72bc4e08ec [ 1042.169422] env[62507]: DEBUG oslo_concurrency.lockutils [None req-588dcb42-e723-4e6c-8017-d979d8f4bbe4 tempest-ServersTestBootFromVolume-1967178980 tempest-ServersTestBootFromVolume-1967178980-project-member] Lock "2fea54d3-0637-4811-9ff3-1a72bc4e08ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 3.343s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.260962] env[62507]: DEBUG nova.network.neutron [req-42428164-82c0-4ba8-9891-fbb75e46afab req-4bbf1dce-aad9-4bc2-9614-0ac7d09010bc service nova] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Updated VIF entry in instance network info cache for port 37c39c64-1bc3-4bba-90c9-389017992a35. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1042.261317] env[62507]: DEBUG nova.network.neutron [req-42428164-82c0-4ba8-9891-fbb75e46afab req-4bbf1dce-aad9-4bc2-9614-0ac7d09010bc service nova] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Updating instance_info_cache with network_info: [{"id": "37c39c64-1bc3-4bba-90c9-389017992a35", "address": "fa:16:3e:16:b3:de", "network": {"id": "010b6aab-a787-46cb-a41c-82df14c895a1", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1118985165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1bda6c3eb5840aba9355b2ac4a3188c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24727047-6358-4015-86c1-394ab07fb88f", "external-id": "nsx-vlan-transportzone-476", "segmentation_id": 476, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37c39c64-1b", "ovs_interfaceid": "37c39c64-1bc3-4bba-90c9-389017992a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.270997] env[62507]: DEBUG oslo_concurrency.lockutils [req-42428164-82c0-4ba8-9891-fbb75e46afab req-4bbf1dce-aad9-4bc2-9614-0ac7d09010bc service nova] Releasing lock "refresh_cache-7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.271267] 
env[62507]: DEBUG nova.compute.manager [req-42428164-82c0-4ba8-9891-fbb75e46afab req-4bbf1dce-aad9-4bc2-9614-0ac7d09010bc service nova] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Received event network-vif-deleted-1df9ee9a-286d-4a99-87db-f8e975007555 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1043.167870] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.163307] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.170508] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.172092] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1044.172092] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1044.197648] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1044.197821] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1044.197956] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1044.198444] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1044.198444] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1044.198444] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1044.198444] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1044.198684] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1044.198684] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1044.198787] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1044.198901] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1044.199443] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.199589] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1046.168867] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1046.168867] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1046.168867] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1046.179912] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.180105] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.180630] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.180630] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1046.182331] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda2831c-9c89-4c92-8251-80dac2deaea2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.191750] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1f6bcc-942a-4bdc-988d-9647f07cd161 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.211483] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af15f6ad-5b2d-44d7-b4d5-857e05a2491c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.220174] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810ffb02-92a1-4090-9a6b-53b343aa7d63 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.259758] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181154MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1046.260470] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.260470] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.325171] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Acquiring lock "d888bcb9-89ef-41aa-b637-e2a15efd0ce8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.325469] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Lock "d888bcb9-89ef-41aa-b637-e2a15efd0ce8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.355088] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1dcce6af-d9f7-4a24-97c7-4b0425c39d68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.356964] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.356964] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 498b6bd7-03d8-44e7-b007-27d86afcb028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.356964] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.356964] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4a3639c7-8795-4702-a729-8239b0d55d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.357174] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e682e67f-5a36-4851-b870-7099d7db119d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.357174] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a9b1ef96-1409-4700-a1bb-4aec1691a0fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.357174] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01d865c8-ed85-45ec-aac6-bf923cd52dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.357174] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.357304] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.372485] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b53bed7e-5e76-4aa5-abe2-b05750497404 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.386175] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a68f6eb0-a549-4c52-b349-bcbc8e2b8669 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.400415] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1fac8aa4-37a9-4f94-8050-b338cd2cd182 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.426035] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance dcf96348-5199-4c3f-9661-5ac0924c5b96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.439612] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9da52346-c500-4335-8f4c-39cf56322589 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.455477] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b0e3f575-9a49-4bb0-af5f-58bdb5ca0aa5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.468144] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9e1c954f-3a25-46f8-a34b-9fa859053951 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.486917] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3627bbf7-507f-4345-b093-3b4f5bb45eae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.500011] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b86fd157-ec5a-4e61-967a-c7cdd86bfea1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.513254] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2e7dfd95-dc72-4dd8-9602-dd1af3d330a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.524463] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7b49dd64-781d-48c8-ac86-0c523b39f99a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.538521] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f70eaaec-66d0-4ec0-b947-3eaa9d6038ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.555939] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d888bcb9-89ef-41aa-b637-e2a15efd0ce8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.555939] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1046.555939] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1047.505883] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69463cc2-d24f-4bdd-ab2c-078ee5a291cf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.513831] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1441c1a-8e0c-4d32-a7fd-26454337e0ac {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.543657] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131cae2a-bde7-47e4-af20-c59d2c822314 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.551218] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b78d7c-1d65-490b-b55d-e0ca716051a2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.565686] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.575137] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1047.589959] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1047.590335] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.330s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.743545] env[62507]: WARNING oslo_vmware.rw_handles [None 
req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1079.743545] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1079.743545] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1079.743545] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1079.743545] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1079.743545] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1079.743545] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1079.743545] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1079.743545] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1079.743545] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1079.743545] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1079.743545] env[62507]: ERROR oslo_vmware.rw_handles [ 1079.744277] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/2c02c06a-ecfd-4515-afc0-1a681e77e6d5/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1079.746305] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1079.746612] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Copying Virtual Disk [datastore2] vmware_temp/2c02c06a-ecfd-4515-afc0-1a681e77e6d5/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/2c02c06a-ecfd-4515-afc0-1a681e77e6d5/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1079.746906] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2097ed9f-0813-49be-97a2-bfd97518bf69 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.757923] env[62507]: DEBUG oslo_vmware.api [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Waiting for the task: (returnval){ [ 1079.757923] env[62507]: value = "task-2460014" [ 1079.757923] env[62507]: _type = "Task" [ 1079.757923] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.765921] env[62507]: DEBUG oslo_vmware.api [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Task: {'id': task-2460014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.268319] env[62507]: DEBUG oslo_vmware.exceptions [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1080.268588] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.269138] env[62507]: ERROR nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1080.269138] env[62507]: Faults: ['InvalidArgument'] [ 1080.269138] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Traceback (most recent call last): [ 1080.269138] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1080.269138] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] yield resources [ 1080.269138] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1080.269138] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] self.driver.spawn(context, instance, image_meta, [ 1080.269138] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1080.269138] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1080.269138] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1080.269138] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] self._fetch_image_if_missing(context, vi) [ 1080.269138] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1080.269510] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] image_cache(vi, tmp_image_ds_loc) [ 1080.269510] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] 
File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1080.269510] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] vm_util.copy_virtual_disk( [ 1080.269510] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1080.269510] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] session._wait_for_task(vmdk_copy_task) [ 1080.269510] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1080.269510] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] return self.wait_for_task(task_ref) [ 1080.269510] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1080.269510] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] return evt.wait() [ 1080.269510] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1080.269510] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] result = hub.switch() [ 1080.269510] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1080.269510] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] return self.greenlet.switch() [ 1080.269883] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1080.269883] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] self.f(*self.args, **self.kw) [ 1080.269883] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1080.269883] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] raise exceptions.translate_fault(task_info.error) [ 1080.269883] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1080.269883] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Faults: ['InvalidArgument'] [ 1080.269883] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] [ 1080.269883] env[62507]: INFO nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Terminating instance [ 1080.271015] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.271224] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1080.271454] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4af9d2e1-d137-4c01-b112-0054902d5928 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.273565] env[62507]: DEBUG nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1080.273759] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1080.274464] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be22ba5-f95b-4ff2-a712-c44f36bd19af {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.280966] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1080.281174] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd05f4e9-ec89-41b5-969e-696e208f1726 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.283211] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1080.283385] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1080.284283] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fa635db-0401-4100-af85-5e53faaac7e2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.289114] env[62507]: DEBUG oslo_vmware.api [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for the task: (returnval){ [ 1080.289114] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]526155b7-5895-7377-82bf-c70860b9f552" [ 1080.289114] env[62507]: _type = "Task" [ 1080.289114] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.296226] env[62507]: DEBUG oslo_vmware.api [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]526155b7-5895-7377-82bf-c70860b9f552, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.354835] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1080.355064] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1080.355251] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Deleting the datastore file [datastore2] 1dcce6af-d9f7-4a24-97c7-4b0425c39d68 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1080.355511] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb887b3c-0784-4e02-820e-38ad86bbc595 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.361788] env[62507]: DEBUG oslo_vmware.api [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Waiting for the task: (returnval){ [ 1080.361788] env[62507]: value = "task-2460016" [ 1080.361788] env[62507]: _type = "Task" [ 1080.361788] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.370106] env[62507]: DEBUG oslo_vmware.api [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Task: {'id': task-2460016, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.800304] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1080.800600] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Creating directory with path [datastore2] vmware_temp/5ac1b735-1272-442a-80c1-1f67d6a3c78e/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1080.800789] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b074a800-1f7a-4acf-8782-f2e7d8cd8b44 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.812952] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Created directory with path [datastore2] vmware_temp/5ac1b735-1272-442a-80c1-1f67d6a3c78e/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1080.812952] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Fetch image to [datastore2] vmware_temp/5ac1b735-1272-442a-80c1-1f67d6a3c78e/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1080.812952] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/5ac1b735-1272-442a-80c1-1f67d6a3c78e/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1080.813197] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb452b5-c952-4f04-a13a-e821f663fdb8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.819572] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5f4bdf-4bae-425b-808a-dd62e60d08de {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.828349] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da34fb2-ffcf-486d-8b40-dda2a029f9d0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.858133] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f6216c-2491-4cdf-b23c-c35176f861c1 
{{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.866413] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-845f83fa-1bec-46f9-aef2-cef0696ade54 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.873819] env[62507]: DEBUG oslo_vmware.api [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Task: {'id': task-2460016, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075273} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.874102] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1080.874286] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1080.874450] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1080.874652] env[62507]: INFO nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Took 0.60 seconds to destroy the instance on the hypervisor. 
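The DeleteDatastoreFile_Task records above follow oslo.vmware's usual pattern: invoke the asynchronous vCenter method, get back a Task moref, then poll it with wait_for_task (the "progress is 0%" lines) until it completes or raises a translated fault — the same path that surfaced the InvalidArgument traceback earlier. A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials, and datacenter moref below are placeholders, not values from this log:

    # Illustrative sketch (not part of the log) of the invoke/poll pattern.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Placeholder managed-object reference for the datacenter.
    datacenter = vim_util.get_moref('datacenter-2', 'Datacenter')

    # Kick off the asynchronous delete; vCenter returns a Task moref.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore2] 1dcce6af-d9f7-4a24-97c7-4b0425c39d68',
        datacenter=datacenter)

    # Polls the task until 'success', or raises a translated exception
    # (e.g. VimFaultException) when task_info.state is 'error'.
    session.wait_for_task(task)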
[ 1080.876905] env[62507]: DEBUG nova.compute.claims [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1080.876905] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.877131] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.889857] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1080.951741] env[62507]: DEBUG oslo_vmware.rw_handles [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5ac1b735-1272-442a-80c1-1f67d6a3c78e/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1081.011700] env[62507]: DEBUG oslo_vmware.rw_handles [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1081.011899] env[62507]: DEBUG oslo_vmware.rw_handles [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5ac1b735-1272-442a-80c1-1f67d6a3c78e/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1081.236539] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d68aa10-9299-4ae0-ace2-3693edf81f0b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.243706] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ee8b17-88ca-4dd3-ad67-544ad41c432b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.272777] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9293a18-27b6-437d-a22b-aeaffbf88d80 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.279909] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f3828f-e9a1-4fee-a9d1-937b5e2e51f6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.292488] env[62507]: DEBUG nova.compute.provider_tree [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1081.302332] env[62507]: DEBUG nova.scheduler.client.report [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1081.317298] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.440s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.317821] env[62507]: ERROR nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1081.317821] env[62507]: Faults: ['InvalidArgument'] [ 1081.317821] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Traceback (most recent call last): [ 1081.317821] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1081.317821] env[62507]: ERROR 
nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] self.driver.spawn(context, instance, image_meta, [ 1081.317821] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1081.317821] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1081.317821] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1081.317821] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] self._fetch_image_if_missing(context, vi) [ 1081.317821] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1081.317821] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] image_cache(vi, tmp_image_ds_loc) [ 1081.317821] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1081.318213] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] vm_util.copy_virtual_disk( [ 1081.318213] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1081.318213] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] session._wait_for_task(vmdk_copy_task) [ 1081.318213] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1081.318213] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] return self.wait_for_task(task_ref) [ 1081.318213] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1081.318213] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] return evt.wait() [ 1081.318213] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1081.318213] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] result = hub.switch() [ 1081.318213] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1081.318213] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] return self.greenlet.switch() [ 1081.318213] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1081.318213] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] self.f(*self.args, **self.kw) [ 1081.318615] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1081.318615] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] raise exceptions.translate_fault(task_info.error) [ 1081.318615] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1081.318615] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Faults: ['InvalidArgument'] [ 1081.318615] env[62507]: ERROR nova.compute.manager [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] [ 1081.318615] env[62507]: DEBUG nova.compute.utils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1081.319937] env[62507]: DEBUG nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Build of instance 1dcce6af-d9f7-4a24-97c7-4b0425c39d68 was re-scheduled: A specified parameter was not correct: fileType [ 1081.319937] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1081.320315] env[62507]: DEBUG nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1081.320486] env[62507]: DEBUG nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1081.320657] env[62507]: DEBUG nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1081.320822] env[62507]: DEBUG nova.network.neutron [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1081.636194] env[62507]: DEBUG nova.network.neutron [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.650209] env[62507]: INFO nova.compute.manager [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Took 0.33 seconds to deallocate network for instance. [ 1081.743020] env[62507]: INFO nova.scheduler.client.report [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Deleted allocations for instance 1dcce6af-d9f7-4a24-97c7-4b0425c39d68 [ 1081.767449] env[62507]: DEBUG oslo_concurrency.lockutils [None req-02d04c61-64f3-4bc7-bc07-55cf81515d9a tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "1dcce6af-d9f7-4a24-97c7-4b0425c39d68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 516.638s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.768633] env[62507]: DEBUG oslo_concurrency.lockutils [None req-11e66b5b-5e56-4a71-8222-f5fd75327387 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "1dcce6af-d9f7-4a24-97c7-4b0425c39d68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 118.516s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.768850] env[62507]: DEBUG oslo_concurrency.lockutils [None req-11e66b5b-5e56-4a71-8222-f5fd75327387 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "1dcce6af-d9f7-4a24-97c7-4b0425c39d68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.769361] env[62507]: DEBUG oslo_concurrency.lockutils [None req-11e66b5b-5e56-4a71-8222-f5fd75327387 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "1dcce6af-d9f7-4a24-97c7-4b0425c39d68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.769543] env[62507]: DEBUG oslo_concurrency.lockutils [None req-11e66b5b-5e56-4a71-8222-f5fd75327387 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "1dcce6af-d9f7-4a24-97c7-4b0425c39d68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.771528] env[62507]: INFO nova.compute.manager [None req-11e66b5b-5e56-4a71-8222-f5fd75327387 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Terminating instance [ 1081.773285] env[62507]: DEBUG nova.compute.manager [None req-11e66b5b-5e56-4a71-8222-f5fd75327387 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1081.773476] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-11e66b5b-5e56-4a71-8222-f5fd75327387 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1081.774014] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63a6d581-9fe7-4e5d-ac8c-102f3c75d2d9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.783547] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e190fb-93e3-486b-926b-3c43d717fb1e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.798407] env[62507]: DEBUG nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1081.821720] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-11e66b5b-5e56-4a71-8222-f5fd75327387 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1dcce6af-d9f7-4a24-97c7-4b0425c39d68 could not be found. [ 1081.822044] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-11e66b5b-5e56-4a71-8222-f5fd75327387 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1081.822121] env[62507]: INFO nova.compute.manager [None req-11e66b5b-5e56-4a71-8222-f5fd75327387 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Took 0.05 seconds to destroy the instance on the hypervisor. 
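The Acquiring/acquired/"released" triplets that bracket the records above and below are emitted at DEBUG by oslo.concurrency's lockutils, which Nova uses for its named in-process locks ("compute_resources", the per-instance and per-event locks, and so on). A minimal sketch of the core pattern, assuming nothing beyond oslo.concurrency itself; the function name is hypothetical:

    # Illustrative sketch (not part of the log) of the lock bookkeeping.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Runs with the named lock held; lockutils logs the
        # 'Acquiring lock ... by ...' and 'acquired ... waited Ns' lines
        # on entry and the '"released" ... held Ns' line on exit.
        ...

    abort_instance_claim()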
[ 1081.822374] env[62507]: DEBUG oslo.service.loopingcall [None req-11e66b5b-5e56-4a71-8222-f5fd75327387 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1081.822609] env[62507]: DEBUG nova.compute.manager [-] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1081.822706] env[62507]: DEBUG nova.network.neutron [-] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1081.857533] env[62507]: DEBUG nova.network.neutron [-] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.860414] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.860414] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.861113] env[62507]: INFO nova.compute.claims [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1081.867074] env[62507]: INFO nova.compute.manager [-] [instance: 1dcce6af-d9f7-4a24-97c7-4b0425c39d68] Took 0.04 seconds to deallocate network for instance. 
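The "Claim successful" line above is checked against the placement inventory this log repeatedly reports for provider 40e67440-0925-46e5-9b58-6e63187cdfab. Schedulable capacity per resource class is (total - reserved) * allocation_ratio; a small worked sketch using the inventory values verbatim from the log:

    # Illustrative sketch (not part of the log): capacity implied by the
    # inventory data reported in this log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} schedulable")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

So a one-vCPU, 128 MB m1.nano claim like the one above fits comfortably, which is why the claim succeeds while the inventory itself is reported as unchanged.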
[ 1081.971088] env[62507]: DEBUG oslo_concurrency.lockutils [None req-11e66b5b-5e56-4a71-8222-f5fd75327387 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "1dcce6af-d9f7-4a24-97c7-4b0425c39d68" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.202s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.173749] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5f7314-6b16-48da-98b0-9de7a3984e98 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.181395] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fca202b-1f10-4b4c-8b29-39dc0a6429b2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.211647] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdf79ea-d4fa-40cf-a4ab-c9193de30013 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.219025] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fbf1d0b-91eb-467e-9728-57a21b2dfe5c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.232383] env[62507]: DEBUG nova.compute.provider_tree [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.240861] env[62507]: DEBUG nova.scheduler.client.report [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1082.260729] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.400s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.260729] env[62507]: DEBUG nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Start building networks asynchronously for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1082.295206] env[62507]: DEBUG nova.compute.utils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1082.296577] env[62507]: DEBUG nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1082.297715] env[62507]: DEBUG nova.network.neutron [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1082.305547] env[62507]: DEBUG nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1082.379069] env[62507]: DEBUG nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1082.385776] env[62507]: DEBUG nova.policy [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '751698c254a140919588ea005a5e586d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e8135bf41224c058bca7f453921f08c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1082.407468] env[62507]: DEBUG nova.virt.hardware [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1082.407870] env[62507]: DEBUG nova.virt.hardware [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1082.408054] env[62507]: DEBUG nova.virt.hardware [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1082.408247] env[62507]: DEBUG nova.virt.hardware [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1082.408394] env[62507]: DEBUG nova.virt.hardware [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1082.408542] env[62507]: DEBUG nova.virt.hardware [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1082.408749] env[62507]: 
DEBUG nova.virt.hardware [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1082.408917] env[62507]: DEBUG nova.virt.hardware [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1082.409142] env[62507]: DEBUG nova.virt.hardware [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1082.409322] env[62507]: DEBUG nova.virt.hardware [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1082.409494] env[62507]: DEBUG nova.virt.hardware [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1082.410338] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787bfb38-a595-4c89-870b-ec49193000fb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.418712] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4b44b6-1769-4f26-a4ea-8558eaa1dab9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.917900] env[62507]: DEBUG nova.network.neutron [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Successfully created port: 65d20342-d5a3-4a6d-8641-f02aa27bd6ec {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1084.244888] env[62507]: DEBUG nova.compute.manager [req-6fff100f-bfa5-4c01-9492-4309d656daa7 req-59bdaa54-54c1-4497-9b90-a220fd8bef38 service nova] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Received event network-vif-plugged-65d20342-d5a3-4a6d-8641-f02aa27bd6ec {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1084.245179] env[62507]: DEBUG oslo_concurrency.lockutils [req-6fff100f-bfa5-4c01-9492-4309d656daa7 req-59bdaa54-54c1-4497-9b90-a220fd8bef38 service nova] Acquiring lock "b53bed7e-5e76-4aa5-abe2-b05750497404-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.245351] env[62507]: DEBUG oslo_concurrency.lockutils [req-6fff100f-bfa5-4c01-9492-4309d656daa7 
req-59bdaa54-54c1-4497-9b90-a220fd8bef38 service nova] Lock "b53bed7e-5e76-4aa5-abe2-b05750497404-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.245522] env[62507]: DEBUG oslo_concurrency.lockutils [req-6fff100f-bfa5-4c01-9492-4309d656daa7 req-59bdaa54-54c1-4497-9b90-a220fd8bef38 service nova] Lock "b53bed7e-5e76-4aa5-abe2-b05750497404-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.245702] env[62507]: DEBUG nova.compute.manager [req-6fff100f-bfa5-4c01-9492-4309d656daa7 req-59bdaa54-54c1-4497-9b90-a220fd8bef38 service nova] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] No waiting events found dispatching network-vif-plugged-65d20342-d5a3-4a6d-8641-f02aa27bd6ec {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1084.245864] env[62507]: WARNING nova.compute.manager [req-6fff100f-bfa5-4c01-9492-4309d656daa7 req-59bdaa54-54c1-4497-9b90-a220fd8bef38 service nova] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Received unexpected event network-vif-plugged-65d20342-d5a3-4a6d-8641-f02aa27bd6ec for instance with vm_state building and task_state spawning. [ 1084.432021] env[62507]: DEBUG nova.network.neutron [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Successfully updated port: 65d20342-d5a3-4a6d-8641-f02aa27bd6ec {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1084.445431] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "refresh_cache-b53bed7e-5e76-4aa5-abe2-b05750497404" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.445590] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquired lock "refresh_cache-b53bed7e-5e76-4aa5-abe2-b05750497404" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.445749] env[62507]: DEBUG nova.network.neutron [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1084.527030] env[62507]: DEBUG nova.network.neutron [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1084.901609] env[62507]: DEBUG nova.network.neutron [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Updating instance_info_cache with network_info: [{"id": "65d20342-d5a3-4a6d-8641-f02aa27bd6ec", "address": "fa:16:3e:76:cd:a0", "network": {"id": "b0f6036c-0ac1-413b-85a4-8e1a827463a0", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1523944788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e8135bf41224c058bca7f453921f08c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65d20342-d5", "ovs_interfaceid": "65d20342-d5a3-4a6d-8641-f02aa27bd6ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.917169] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Releasing lock "refresh_cache-b53bed7e-5e76-4aa5-abe2-b05750497404" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.917463] env[62507]: DEBUG nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Instance network_info: |[{"id": "65d20342-d5a3-4a6d-8641-f02aa27bd6ec", "address": "fa:16:3e:76:cd:a0", "network": {"id": "b0f6036c-0ac1-413b-85a4-8e1a827463a0", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1523944788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e8135bf41224c058bca7f453921f08c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65d20342-d5", "ovs_interfaceid": "65d20342-d5a3-4a6d-8641-f02aa27bd6ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1084.917880] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:cd:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '65d20342-d5a3-4a6d-8641-f02aa27bd6ec', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1084.925300] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Creating folder: Project (2e8135bf41224c058bca7f453921f08c). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1084.926172] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-143adf8e-f9b5-4b67-8072-2c88e5659243 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.935525] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Created folder: Project (2e8135bf41224c058bca7f453921f08c) in parent group-v497991. [ 1084.935707] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Creating folder: Instances. Parent ref: group-v498060. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1084.935919] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a564989f-4106-4140-b772-732c4fbd7c66 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.944012] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Created folder: Instances in parent group-v498060. [ 1084.944243] env[62507]: DEBUG oslo.service.loopingcall [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1084.944418] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1084.944610] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4edb33f-a93c-439c-93d5-f98fb48fc076 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.962863] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1084.962863] env[62507]: value = "task-2460019" [ 1084.962863] env[62507]: _type = "Task" [ 1084.962863] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.970086] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460019, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.478619] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460019, 'name': CreateVM_Task, 'duration_secs': 0.283316} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.478939] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1085.481994] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.482180] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.482506] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1085.482766] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-458a107e-63c6-4414-8bee-69e8faf335d6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.487862] env[62507]: DEBUG oslo_vmware.api [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for the task: (returnval){ [ 1085.487862] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52ca022d-4853-1179-a8ee-8ac273e78bc9" [ 1085.487862] env[62507]: _type = "Task" [ 1085.487862] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.495718] env[62507]: DEBUG oslo_vmware.api [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52ca022d-4853-1179-a8ee-8ac273e78bc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.997755] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.998112] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1085.998393] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1086.387086] env[62507]: DEBUG nova.compute.manager [req-36180c9d-df12-4913-829f-013e02ce27f4 req-311644a5-53a6-4367-9ba6-5f1bca12adc5 service nova] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Received event network-changed-65d20342-d5a3-4a6d-8641-f02aa27bd6ec {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1086.387284] env[62507]: DEBUG nova.compute.manager [req-36180c9d-df12-4913-829f-013e02ce27f4 req-311644a5-53a6-4367-9ba6-5f1bca12adc5 service nova] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Refreshing instance network info cache due to event network-changed-65d20342-d5a3-4a6d-8641-f02aa27bd6ec. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1086.387502] env[62507]: DEBUG oslo_concurrency.lockutils [req-36180c9d-df12-4913-829f-013e02ce27f4 req-311644a5-53a6-4367-9ba6-5f1bca12adc5 service nova] Acquiring lock "refresh_cache-b53bed7e-5e76-4aa5-abe2-b05750497404" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1086.387673] env[62507]: DEBUG oslo_concurrency.lockutils [req-36180c9d-df12-4913-829f-013e02ce27f4 req-311644a5-53a6-4367-9ba6-5f1bca12adc5 service nova] Acquired lock "refresh_cache-b53bed7e-5e76-4aa5-abe2-b05750497404" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.387879] env[62507]: DEBUG nova.network.neutron [req-36180c9d-df12-4913-829f-013e02ce27f4 req-311644a5-53a6-4367-9ba6-5f1bca12adc5 service nova] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Refreshing network info cache for port 65d20342-d5a3-4a6d-8641-f02aa27bd6ec {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1086.777368] env[62507]: DEBUG nova.network.neutron [req-36180c9d-df12-4913-829f-013e02ce27f4 req-311644a5-53a6-4367-9ba6-5f1bca12adc5 service nova] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Updated VIF entry in instance network info cache for port 65d20342-d5a3-4a6d-8641-f02aa27bd6ec. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1086.778459] env[62507]: DEBUG nova.network.neutron [req-36180c9d-df12-4913-829f-013e02ce27f4 req-311644a5-53a6-4367-9ba6-5f1bca12adc5 service nova] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Updating instance_info_cache with network_info: [{"id": "65d20342-d5a3-4a6d-8641-f02aa27bd6ec", "address": "fa:16:3e:76:cd:a0", "network": {"id": "b0f6036c-0ac1-413b-85a4-8e1a827463a0", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1523944788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e8135bf41224c058bca7f453921f08c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65d20342-d5", "ovs_interfaceid": "65d20342-d5a3-4a6d-8641-f02aa27bd6ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.788577] env[62507]: DEBUG oslo_concurrency.lockutils [req-36180c9d-df12-4913-829f-013e02ce27f4 req-311644a5-53a6-4367-9ba6-5f1bca12adc5 service nova] Releasing lock "refresh_cache-b53bed7e-5e76-4aa5-abe2-b05750497404" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.083088] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 
tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "8e22d586-0ab8-4968-b0d1-2ef1cd8c0249" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.083334] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "8e22d586-0ab8-4968-b0d1-2ef1cd8c0249" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.482157] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "f4f51028-a313-4d17-bcf1-4decec2d3c3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.482388] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "f4f51028-a313-4d17-bcf1-4decec2d3c3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.796637] env[62507]: DEBUG oslo_concurrency.lockutils [None req-09418f37-2639-41f1-9799-fdefa079324f tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "0bc7792e-d291-46ef-9ac1-420959c38191" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.796925] env[62507]: DEBUG oslo_concurrency.lockutils [None req-09418f37-2639-41f1-9799-fdefa079324f tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "0bc7792e-d291-46ef-9ac1-420959c38191" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.549994] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0d3c7653-16ec-4117-8ad0-8601f1c8e99d tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "b53bed7e-5e76-4aa5-abe2-b05750497404" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.911133] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ccd929f6-3a22-4960-aa73-75d6678ca2ff tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Acquiring lock "ffd1c74f-b08a-4f59-bc99-4a6910dbe1ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
{{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.911452] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ccd929f6-3a22-4960-aa73-75d6678ca2ff tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Lock "ffd1c74f-b08a-4f59-bc99-4a6910dbe1ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.590652] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1100.614886] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.167407] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.163492] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.167243] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.167592] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.167974] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1106.167974] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1106.191194] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1106.191194] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1106.191194] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1106.191194] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1106.191453] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1106.191701] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1106.191944] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1106.192201] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1106.192424] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1106.192654] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1106.194023] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1106.194023] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.194023] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.194023] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1106.194318] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.194318] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Cleaning up deleted instances with incomplete migration {{(pid=62507) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1108.178045] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.178045] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.189713] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.189915] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.190096] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.190257] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1108.191622] env[62507]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd468666-dc49-43cc-8eb5-faeb551c8bec {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.202131] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf9dbf7-2bed-4ba8-84af-4b94d44158ed {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.216623] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d886f512-414f-4428-8d82-23ffe734d3c8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.223022] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0d3da4-986e-4969-895e-310717c9de6d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.250888] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181158MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1108.251108] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.251326] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.399428] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1108.399603] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 498b6bd7-03d8-44e7-b007-27d86afcb028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1108.399738] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1108.399863] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4a3639c7-8795-4702-a729-8239b0d55d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1108.399998] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e682e67f-5a36-4851-b870-7099d7db119d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1108.400178] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a9b1ef96-1409-4700-a1bb-4aec1691a0fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1108.400300] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01d865c8-ed85-45ec-aac6-bf923cd52dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1108.400418] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1108.400535] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1108.400648] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b53bed7e-5e76-4aa5-abe2-b05750497404 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1108.411846] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a68f6eb0-a549-4c52-b349-bcbc8e2b8669 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.422081] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1fac8aa4-37a9-4f94-8050-b338cd2cd182 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.432389] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance dcf96348-5199-4c3f-9661-5ac0924c5b96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.442243] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9da52346-c500-4335-8f4c-39cf56322589 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.451961] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b0e3f575-9a49-4bb0-af5f-58bdb5ca0aa5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.461567] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9e1c954f-3a25-46f8-a34b-9fa859053951 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.470602] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3627bbf7-507f-4345-b093-3b4f5bb45eae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.479579] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b86fd157-ec5a-4e61-967a-c7cdd86bfea1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.489890] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2e7dfd95-dc72-4dd8-9602-dd1af3d330a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.499205] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7b49dd64-781d-48c8-ac86-0c523b39f99a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.509969] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f70eaaec-66d0-4ec0-b947-3eaa9d6038ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.520422] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d888bcb9-89ef-41aa-b637-e2a15efd0ce8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.530442] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.540130] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f4f51028-a313-4d17-bcf1-4decec2d3c3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.549499] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 0bc7792e-d291-46ef-9ac1-420959c38191 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.559252] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ffd1c74f-b08a-4f59-bc99-4a6910dbe1ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1108.559475] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1108.559621] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1108.575371] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing inventories for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1108.589205] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Updating ProviderTree inventory for provider 40e67440-0925-46e5-9b58-6e63187cdfab from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1108.589395] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Updating inventory in ProviderTree for provider 40e67440-0925-46e5-9b58-6e63187cdfab with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1108.600352] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing aggregate associations for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab, aggregates: None {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1108.617740] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing trait associations for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab, traits: 
COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1108.895742] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08520d6-48fa-498e-a574-e51e785d968e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.903329] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d3657b-f543-4c1f-b508-6e5eb77a0038 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.932139] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d4f150-bf89-406d-92bd-174ef510dd00 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.938800] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321f6390-31b7-4ae3-bca4-b1ba4e60d3e9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.951081] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.959603] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1108.972722] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1108.973014] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.722s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.168152] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.168266] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Cleaning up deleted instances {{(pid=62507) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1109.182860] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] 
There are 1 instances to clean {{(pid=62507) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1109.183148] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 2fea54d3-0637-4811-9ff3-1a72bc4e08ec] Instance has had 0 of 5 cleanup attempts {{(pid=62507) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11232}} [ 1109.224246] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.375304] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.397719] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Getting list of instances from cluster (obj){ [ 1111.397719] env[62507]: value = "domain-c8" [ 1111.397719] env[62507]: _type = "ClusterComputeResource" [ 1111.397719] env[62507]: } {{(pid=62507) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1111.399274] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf72abcc-e918-4953-880b-002d1313a0a5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.416348] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Got total of 10 instances {{(pid=62507) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1111.416450] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1111.416648] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 498b6bd7-03d8-44e7-b007-27d86afcb028 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1111.416877] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1111.416986] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 4a3639c7-8795-4702-a729-8239b0d55d51 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1111.417161] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid e682e67f-5a36-4851-b870-7099d7db119d {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1111.417313] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid a9b1ef96-1409-4700-a1bb-4aec1691a0fd {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1111.417460] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 
01d865c8-ed85-45ec-aac6-bf923cd52dfa {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1111.417606] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid b866307e-f0e9-40d0-8603-fbfb9e2ee15a {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1111.417778] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1111.417999] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid b53bed7e-5e76-4aa5-abe2-b05750497404 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1111.418366] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.418609] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "498b6bd7-03d8-44e7-b007-27d86afcb028" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.418810] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.419092] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "4a3639c7-8795-4702-a729-8239b0d55d51" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.419260] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "e682e67f-5a36-4851-b870-7099d7db119d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.419469] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.419661] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1111.419855] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.420063] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.420256] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "b53bed7e-5e76-4aa5-abe2-b05750497404" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.763539] env[62507]: WARNING oslo_vmware.rw_handles [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1129.763539] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1129.763539] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1129.763539] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1129.763539] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1129.763539] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1129.763539] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1129.763539] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1129.763539] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1129.763539] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1129.763539] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1129.763539] env[62507]: ERROR oslo_vmware.rw_handles [ 1129.764182] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/5ac1b735-1272-442a-80c1-1f67d6a3c78e/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1129.765851] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1129.766107] 
env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Copying Virtual Disk [datastore2] vmware_temp/5ac1b735-1272-442a-80c1-1f67d6a3c78e/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/5ac1b735-1272-442a-80c1-1f67d6a3c78e/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1129.766388] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f166874f-3d0a-4f7c-b265-1fbadd6dcc5f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.774187] env[62507]: DEBUG oslo_vmware.api [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for the task: (returnval){ [ 1129.774187] env[62507]: value = "task-2460020" [ 1129.774187] env[62507]: _type = "Task" [ 1129.774187] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.782247] env[62507]: DEBUG oslo_vmware.api [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': task-2460020, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.284677] env[62507]: DEBUG oslo_vmware.exceptions [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1130.284677] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1130.285241] env[62507]: ERROR nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1130.285241] env[62507]: Faults: ['InvalidArgument'] [ 1130.285241] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Traceback (most recent call last): [ 1130.285241] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1130.285241] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] yield resources [ 1130.285241] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1130.285241] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] self.driver.spawn(context, instance, image_meta, [ 1130.285241] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1130.285241] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1130.285241] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1130.285241] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] self._fetch_image_if_missing(context, vi) [ 1130.285241] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1130.285601] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] image_cache(vi, tmp_image_ds_loc) [ 1130.285601] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1130.285601] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] vm_util.copy_virtual_disk( [ 1130.285601] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1130.285601] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] session._wait_for_task(vmdk_copy_task) [ 1130.285601] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1130.285601] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] return self.wait_for_task(task_ref) [ 1130.285601] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1130.285601] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] return evt.wait() [ 1130.285601] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1130.285601] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] result = hub.switch() [ 1130.285601] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1130.285601] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] return self.greenlet.switch() [ 1130.285928] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1130.285928] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] self.f(*self.args, **self.kw) [ 1130.285928] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1130.285928] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] raise exceptions.translate_fault(task_info.error) [ 1130.285928] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1130.285928] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Faults: ['InvalidArgument'] [ 1130.285928] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] [ 1130.285928] env[62507]: INFO nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Terminating instance [ 1130.287164] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.287376] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1130.287612] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6482e44a-e97a-4676-a10c-31de97b1e17b {{(pid=62507) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.289731] env[62507]: DEBUG nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1130.289927] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1130.290646] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5731ca-88f4-4f3a-a558-7ad4e64b7612 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.297548] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1130.297780] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f9022eb-1111-49e4-a04a-4d0e0b29c540 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.299834] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1130.300025] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1130.300968] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88e7749c-560b-474d-8a8e-eed977f80008 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.305369] env[62507]: DEBUG oslo_vmware.api [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Waiting for the task: (returnval){ [ 1130.305369] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f0ecbe-a403-8fb9-ea03-0a6419ab9739" [ 1130.305369] env[62507]: _type = "Task" [ 1130.305369] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.312290] env[62507]: DEBUG oslo_vmware.api [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f0ecbe-a403-8fb9-ea03-0a6419ab9739, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.365695] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1130.365925] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1130.366266] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Deleting the datastore file [datastore2] 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1130.366408] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11e58543-71b1-43d1-adfc-2946cf392c53 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.372882] env[62507]: DEBUG oslo_vmware.api [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for the task: (returnval){ [ 1130.372882] env[62507]: value = "task-2460022" [ 1130.372882] env[62507]: _type = "Task" [ 1130.372882] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.380482] env[62507]: DEBUG oslo_vmware.api [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': task-2460022, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.815043] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1130.815286] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Creating directory with path [datastore2] vmware_temp/29877332-0289-4618-99a8-5d9bc8fe8f55/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1130.815477] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29a6c450-3da9-4408-bdf2-dfe2be598339 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.826518] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Created directory with path [datastore2] vmware_temp/29877332-0289-4618-99a8-5d9bc8fe8f55/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1130.826713] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Fetch image to [datastore2] vmware_temp/29877332-0289-4618-99a8-5d9bc8fe8f55/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1130.826914] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/29877332-0289-4618-99a8-5d9bc8fe8f55/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1130.827641] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fcaa3a4-42de-4646-a251-1448277df6dd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.833918] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb8f48f-5a74-436f-899c-c01b401434e9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.842610] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beaa5539-a1ba-4057-b15a-dd275b9c95ff {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.873634] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62559b9f-0d1a-4590-9342-959bf0ca5cc7 {{(pid=62507) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.884368] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-81381386-1923-44d3-80d3-a2252a65bfaa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.885996] env[62507]: DEBUG oslo_vmware.api [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': task-2460022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067257} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.886258] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1130.886441] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1130.886604] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1130.886775] env[62507]: INFO nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1130.888911] env[62507]: DEBUG nova.compute.claims [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1130.889130] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.889346] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.971081] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1131.019624] env[62507]: DEBUG oslo_vmware.rw_handles [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/29877332-0289-4618-99a8-5d9bc8fe8f55/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1131.080150] env[62507]: DEBUG oslo_vmware.rw_handles [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1131.080428] env[62507]: DEBUG oslo_vmware.rw_handles [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/29877332-0289-4618-99a8-5d9bc8fe8f55/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1131.273550] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb55ba0e-c1eb-4ca4-9b6e-3cabd41d4991 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.281264] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24096d38-622c-4537-9166-89d25319a1f6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.312270] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92eb108d-edcb-4374-a90a-f561745b8e42 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.318713] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7acac2-0678-4239-8b3d-35a8d14e8fdb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.331084] env[62507]: DEBUG nova.compute.provider_tree [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1131.339694] env[62507]: DEBUG nova.scheduler.client.report [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1131.353347] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.464s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.353857] env[62507]: ERROR nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1131.353857] env[62507]: Faults: ['InvalidArgument'] [ 1131.353857] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Traceback (most recent call last): [ 1131.353857] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1131.353857] env[62507]: ERROR 
nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] self.driver.spawn(context, instance, image_meta, [ 1131.353857] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1131.353857] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1131.353857] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1131.353857] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] self._fetch_image_if_missing(context, vi) [ 1131.353857] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1131.353857] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] image_cache(vi, tmp_image_ds_loc) [ 1131.353857] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1131.354243] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] vm_util.copy_virtual_disk( [ 1131.354243] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1131.354243] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] session._wait_for_task(vmdk_copy_task) [ 1131.354243] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1131.354243] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] return self.wait_for_task(task_ref) [ 1131.354243] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1131.354243] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] return evt.wait() [ 1131.354243] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1131.354243] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] result = hub.switch() [ 1131.354243] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1131.354243] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] return self.greenlet.switch() [ 1131.354243] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1131.354243] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] self.f(*self.args, **self.kw) [ 1131.354618] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1131.354618] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] raise exceptions.translate_fault(task_info.error) [ 1131.354618] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1131.354618] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Faults: ['InvalidArgument'] [ 1131.354618] env[62507]: ERROR nova.compute.manager [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] [ 1131.354618] env[62507]: DEBUG nova.compute.utils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1131.356161] env[62507]: DEBUG nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Build of instance 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 was re-scheduled: A specified parameter was not correct: fileType [ 1131.356161] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1131.356297] env[62507]: DEBUG nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1131.356451] env[62507]: DEBUG nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1131.356625] env[62507]: DEBUG nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1131.356786] env[62507]: DEBUG nova.network.neutron [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1131.692458] env[62507]: DEBUG nova.network.neutron [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.726259] env[62507]: INFO nova.compute.manager [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Took 0.37 seconds to deallocate network for instance. [ 1131.826595] env[62507]: INFO nova.scheduler.client.report [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Deleted allocations for instance 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 [ 1132.014500] env[62507]: DEBUG oslo_concurrency.lockutils [None req-97e2b145-720f-4055-85d9-102615a334f9 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 565.399s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.015660] env[62507]: DEBUG oslo_concurrency.lockutils [None req-79497b52-0c5d-4784-8ce3-447f5ea729b5 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 366.818s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.015931] env[62507]: DEBUG oslo_concurrency.lockutils [None req-79497b52-0c5d-4784-8ce3-447f5ea729b5 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.016600] env[62507]: DEBUG oslo_concurrency.lockutils [None req-79497b52-0c5d-4784-8ce3-447f5ea729b5 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s
{{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.016923] env[62507]: DEBUG oslo_concurrency.lockutils [None req-79497b52-0c5d-4784-8ce3-447f5ea729b5 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.018928] env[62507]: INFO nova.compute.manager [None req-79497b52-0c5d-4784-8ce3-447f5ea729b5 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Terminating instance [ 1132.020830] env[62507]: DEBUG nova.compute.manager [None req-79497b52-0c5d-4784-8ce3-447f5ea729b5 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1132.021893] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-79497b52-0c5d-4784-8ce3-447f5ea729b5 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1132.021893] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-233464c6-0137-476b-930e-d034097c1454 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.032374] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ea265b-04b2-4ca9-b31f-74bb44ff466e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.044678] env[62507]: DEBUG nova.compute.manager [None req-cefe5265-47e3-4f46-8a41-a3997bd6a45d tempest-ServerDiagnosticsTest-1845157807 tempest-ServerDiagnosticsTest-1845157807-project-member] [instance: a68f6eb0-a549-4c52-b349-bcbc8e2b8669] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1132.066098] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-79497b52-0c5d-4784-8ce3-447f5ea729b5 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7 could not be found. [ 1132.066332] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-79497b52-0c5d-4784-8ce3-447f5ea729b5 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1132.066527] env[62507]: INFO nova.compute.manager [None req-79497b52-0c5d-4784-8ce3-447f5ea729b5 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1132.066801] env[62507]: DEBUG oslo.service.loopingcall [None req-79497b52-0c5d-4784-8ce3-447f5ea729b5 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1132.067989] env[62507]: DEBUG nova.compute.manager [-] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1132.067989] env[62507]: DEBUG nova.network.neutron [-] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1132.076953] env[62507]: DEBUG nova.compute.manager [None req-cefe5265-47e3-4f46-8a41-a3997bd6a45d tempest-ServerDiagnosticsTest-1845157807 tempest-ServerDiagnosticsTest-1845157807-project-member] [instance: a68f6eb0-a549-4c52-b349-bcbc8e2b8669] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1132.096485] env[62507]: DEBUG nova.network.neutron [-] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.100939] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cefe5265-47e3-4f46-8a41-a3997bd6a45d tempest-ServerDiagnosticsTest-1845157807 tempest-ServerDiagnosticsTest-1845157807-project-member] Lock "a68f6eb0-a549-4c52-b349-bcbc8e2b8669" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 196.992s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.107311] env[62507]: INFO nova.compute.manager [-] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] Took 0.04 seconds to deallocate network for instance. [ 1132.113296] env[62507]: DEBUG nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Starting instance...
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1132.173129] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.173129] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.174547] env[62507]: INFO nova.compute.claims [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1132.211363] env[62507]: DEBUG oslo_concurrency.lockutils [None req-79497b52-0c5d-4784-8ce3-447f5ea729b5 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.196s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.214648] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 20.796s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.214959] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 9b0a9f16-c53e-41b8-a473-b1eff1ad41c7] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1132.215394] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "9b0a9f16-c53e-41b8-a473-b1eff1ad41c7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.517452] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea68088b-7648-451c-ba2b-ede73793692d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.524976] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416f0e05-c72e-4218-aa48-08a97e49364c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.554362] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74929a65-9f06-4395-93cb-1a1c29363213 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.561331] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a09e83-6d94-4fbb-9c03-110f952db9eb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.575172] env[62507]: DEBUG nova.compute.provider_tree [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1132.583760] env[62507]: DEBUG nova.scheduler.client.report [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1132.599735] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.427s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.600214] env[62507]: DEBUG nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Start building networks asynchronously for instance.
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1132.634449] env[62507]: DEBUG nova.compute.utils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1132.635962] env[62507]: DEBUG nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1132.636151] env[62507]: DEBUG nova.network.neutron [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1132.645636] env[62507]: DEBUG nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1132.713284] env[62507]: DEBUG nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1132.743199] env[62507]: DEBUG nova.virt.hardware [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:23:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='fda51085-9bb5-41c4-8bcc-b01baa3aa74b',id=38,is_public=True,memory_mb=128,name='tempest-test_resize_flavor_-1275401676',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=<?>,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-02-12T01:14:38Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1132.743479] env[62507]: DEBUG nova.virt.hardware [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1132.743613] env[62507]: DEBUG nova.virt.hardware [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1132.743792] env[62507]: DEBUG nova.virt.hardware [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1132.744225] env[62507]: DEBUG nova.virt.hardware [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1132.744418] env[62507]: DEBUG nova.virt.hardware [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1132.744636] env[62507]: DEBUG nova.virt.hardware [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1132.744803] env[62507]: DEBUG nova.virt.hardware [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1132.744993] env[62507]: DEBUG nova.virt.hardware [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1132.745189] env[62507]: DEBUG nova.virt.hardware [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1132.745385] env[62507]: DEBUG nova.virt.hardware [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1132.746253] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4ae2c7-e6c8-4974-ae37-30b5d233dd10 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.750862] env[62507]: DEBUG nova.policy [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6f5fea623c94f7b816f42501eeb1db2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6b508b5886b148c0a8c913a053d839bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 
'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1132.758193] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8532182-4764-4371-9c23-803257257e86 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.847217] env[62507]: DEBUG oslo_concurrency.lockutils [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "1fac8aa4-37a9-4f94-8050-b338cd2cd182" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.128417] env[62507]: DEBUG nova.network.neutron [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Successfully created port: f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1134.072440] env[62507]: DEBUG nova.compute.manager [req-a8b48668-7870-4611-b2ea-5e07acf10366 req-09e53196-98e1-4035-a13d-f7bc78684599 service nova] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Received event network-vif-plugged-f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1134.072709] env[62507]: DEBUG oslo_concurrency.lockutils [req-a8b48668-7870-4611-b2ea-5e07acf10366 req-09e53196-98e1-4035-a13d-f7bc78684599 service nova] Acquiring lock "1fac8aa4-37a9-4f94-8050-b338cd2cd182-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.072874] env[62507]: DEBUG oslo_concurrency.lockutils [req-a8b48668-7870-4611-b2ea-5e07acf10366 req-09e53196-98e1-4035-a13d-f7bc78684599 service nova] Lock "1fac8aa4-37a9-4f94-8050-b338cd2cd182-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.073055] env[62507]: DEBUG oslo_concurrency.lockutils [req-a8b48668-7870-4611-b2ea-5e07acf10366 req-09e53196-98e1-4035-a13d-f7bc78684599 service nova] Lock "1fac8aa4-37a9-4f94-8050-b338cd2cd182-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.073229] env[62507]: DEBUG nova.compute.manager [req-a8b48668-7870-4611-b2ea-5e07acf10366 req-09e53196-98e1-4035-a13d-f7bc78684599 service nova] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] No waiting events found dispatching network-vif-plugged-f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1134.073393] env[62507]: WARNING nova.compute.manager [req-a8b48668-7870-4611-b2ea-5e07acf10366 req-09e53196-98e1-4035-a13d-f7bc78684599 service nova] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Received unexpected event network-vif-plugged-f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a for
instance with vm_state building and task_state deleting. [ 1134.159223] env[62507]: DEBUG nova.network.neutron [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Successfully updated port: f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1134.175568] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "refresh_cache-1fac8aa4-37a9-4f94-8050-b338cd2cd182" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.175714] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquired lock "refresh_cache-1fac8aa4-37a9-4f94-8050-b338cd2cd182" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.175868] env[62507]: DEBUG nova.network.neutron [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1134.238803] env[62507]: DEBUG nova.network.neutron [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1134.429653] env[62507]: DEBUG nova.network.neutron [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Updating instance_info_cache with network_info: [{"id": "f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a", "address": "fa:16:3e:53:da:67", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf733ac33-ae", "ovs_interfaceid": "f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.452441] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Releasing lock "refresh_cache-1fac8aa4-37a9-4f94-8050-b338cd2cd182" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1134.452763] env[62507]: DEBUG nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Instance network_info: |[{"id": "f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a", "address": "fa:16:3e:53:da:67", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf733ac33-ae", "ovs_interfaceid": "f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1134.453201] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None 
req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:da:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1134.460806] env[62507]: DEBUG oslo.service.loopingcall [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1134.461362] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1134.461589] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-884a0bd3-fa63-4b3c-b0ef-aecc70b71821 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.492428] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1134.492428] env[62507]: value = "task-2460023" [ 1134.492428] env[62507]: _type = "Task" [ 1134.492428] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.501339] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460023, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.000845] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460023, 'name': CreateVM_Task, 'duration_secs': 0.28102} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.001404] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1135.002084] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.002255] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.002564] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1135.002833] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03e1d01a-38b9-4570-8131-93b0d43de0de {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.007518] env[62507]: DEBUG oslo_vmware.api [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Waiting for the task: (returnval){ [ 1135.007518] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52b40a49-c904-a6f5-13ce-2d9dc141eab1" [ 1135.007518] env[62507]: _type = "Task" [ 1135.007518] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.015395] env[62507]: DEBUG oslo_vmware.api [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52b40a49-c904-a6f5-13ce-2d9dc141eab1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.525195] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.525486] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1135.525657] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1136.328576] env[62507]: DEBUG nova.compute.manager [req-3b49cf58-d2b6-4353-8242-60e9fefb98a8 req-a7449932-22a7-4540-b547-9a46246649d5 service nova] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Received event network-changed-f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1136.328784] env[62507]: DEBUG nova.compute.manager [req-3b49cf58-d2b6-4353-8242-60e9fefb98a8 req-a7449932-22a7-4540-b547-9a46246649d5 service nova] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Refreshing instance network info cache due to event network-changed-f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1136.329008] env[62507]: DEBUG oslo_concurrency.lockutils [req-3b49cf58-d2b6-4353-8242-60e9fefb98a8 req-a7449932-22a7-4540-b547-9a46246649d5 service nova] Acquiring lock "refresh_cache-1fac8aa4-37a9-4f94-8050-b338cd2cd182" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1136.329169] env[62507]: DEBUG oslo_concurrency.lockutils [req-3b49cf58-d2b6-4353-8242-60e9fefb98a8 req-a7449932-22a7-4540-b547-9a46246649d5 service nova] Acquired lock "refresh_cache-1fac8aa4-37a9-4f94-8050-b338cd2cd182" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.329399] env[62507]: DEBUG nova.network.neutron [req-3b49cf58-d2b6-4353-8242-60e9fefb98a8 req-a7449932-22a7-4540-b547-9a46246649d5 service nova] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Refreshing network info cache for port f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1136.793876] env[62507]: DEBUG nova.network.neutron [req-3b49cf58-d2b6-4353-8242-60e9fefb98a8 req-a7449932-22a7-4540-b547-9a46246649d5 service nova] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Updated VIF entry in instance network info cache for port f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1136.793876] env[62507]: DEBUG nova.network.neutron [req-3b49cf58-d2b6-4353-8242-60e9fefb98a8 req-a7449932-22a7-4540-b547-9a46246649d5 service nova] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Updating instance_info_cache with network_info: [{"id": "f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a", "address": "fa:16:3e:53:da:67", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf733ac33-ae", "ovs_interfaceid": "f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.802381] env[62507]: DEBUG oslo_concurrency.lockutils [req-3b49cf58-d2b6-4353-8242-60e9fefb98a8 req-a7449932-22a7-4540-b547-9a46246649d5 service nova] Releasing lock "refresh_cache-1fac8aa4-37a9-4f94-8050-b338cd2cd182" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.993124] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Acquiring lock "65efc608-6573-4690-8d11-2f0459647d70" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.993385] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Lock "65efc608-6573-4690-8d11-2f0459647d70" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.212375] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.691426] env[62507]: DEBUG oslo_concurrency.lockutils [None req-53c2f539-908b-4a74-ab4b-a1d413892359 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "e04f6623-4547-4095-a575-67eae0bbd289" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.692146] env[62507]: DEBUG oslo_concurrency.lockutils [None req-53c2f539-908b-4a74-ab4b-a1d413892359 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "e04f6623-4547-4095-a575-67eae0bbd289" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.163848] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1165.167131] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.170027] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.169458] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.167636] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.167971] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1168.167971] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1168.191617] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1168.191768] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1168.191911] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1168.192102] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1168.192308] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1168.192548] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1168.192698] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1168.192827] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1168.192989] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1168.193784] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1168.193784] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1168.193946] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.194130] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1169.167191] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.184753] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.185142] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1169.185142] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1169.185142] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1169.187405] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02441b87-56d1-4204-b0e4-02ee30d7e59d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.197475] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb27127-d500-4a68-b34b-6da9d142a58c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.217157] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795f1a14-d980-4f31-a2bb-284e7ef82d1c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.226326] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb3f9d5-4524-4819-bfe5-db758c4eeffe {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.259649] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181176MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1169.259649] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1169.259807] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1169.358494] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 498b6bd7-03d8-44e7-b007-27d86afcb028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.358663] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.358792] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4a3639c7-8795-4702-a729-8239b0d55d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.358917] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e682e67f-5a36-4851-b870-7099d7db119d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.359053] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a9b1ef96-1409-4700-a1bb-4aec1691a0fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.359182] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01d865c8-ed85-45ec-aac6-bf923cd52dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.359303] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.359417] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.359665] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b53bed7e-5e76-4aa5-abe2-b05750497404 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.359791] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1fac8aa4-37a9-4f94-8050-b338cd2cd182 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.371611] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3627bbf7-507f-4345-b093-3b4f5bb45eae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1169.384925] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b86fd157-ec5a-4e61-967a-c7cdd86bfea1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1169.396253] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 2e7dfd95-dc72-4dd8-9602-dd1af3d330a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1169.408645] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7b49dd64-781d-48c8-ac86-0c523b39f99a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1169.419777] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f70eaaec-66d0-4ec0-b947-3eaa9d6038ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1169.439067] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d888bcb9-89ef-41aa-b637-e2a15efd0ce8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1169.451509] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1169.462611] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f4f51028-a313-4d17-bcf1-4decec2d3c3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1169.473530] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 0bc7792e-d291-46ef-9ac1-420959c38191 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1169.487666] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ffd1c74f-b08a-4f59-bc99-4a6910dbe1ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1169.504615] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 65efc608-6573-4690-8d11-2f0459647d70 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1169.523496] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e04f6623-4547-4095-a575-67eae0bbd289 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1169.523496] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1169.523496] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1169.888017] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12620d7d-57ef-4a02-8762-bda24fe903aa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.897918] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae553d8-b58a-416d-9e8f-e79def0bf4d1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.906113] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f683e774-3ab9-4d87-b763-dc93afafb0bf tempest-ServersTestMultiNic-508332718 tempest-ServersTestMultiNic-508332718-project-member] Acquiring lock "0600b5ad-334a-41e3-add2-4f8458040774" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.906495] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f683e774-3ab9-4d87-b763-dc93afafb0bf tempest-ServersTestMultiNic-508332718 tempest-ServersTestMultiNic-508332718-project-member] Lock "0600b5ad-334a-41e3-add2-4f8458040774" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1169.933595] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a612462d-5fc3-47b7-b803-b54cdbfba1d7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.941688] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053b8528-3a63-48fd-be01-17fa4ce7e10b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.955435] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.972070] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1169.991834] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1169.991834] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.732s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1170.641771] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8272ccb2-8152-4e44-8d4c-7f9f664557a9 tempest-ListImageFiltersTestJSON-995645754 tempest-ListImageFiltersTestJSON-995645754-project-member] Acquiring lock "b35972e0-4a7b-4b2e-940e-2d9f40b0e55f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.642401] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8272ccb2-8152-4e44-8d4c-7f9f664557a9 tempest-ListImageFiltersTestJSON-995645754 tempest-ListImageFiltersTestJSON-995645754-project-member] Lock "b35972e0-4a7b-4b2e-940e-2d9f40b0e55f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.931741] env[62507]: DEBUG oslo_concurrency.lockutils [None req-d9f1f7e4-34e9-45f6-b2a1-309455c342b1 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "ef76c5ad-176d-44c0-891a-66333b42d0ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.931987] env[62507]: DEBUG oslo_concurrency.lockutils [None req-d9f1f7e4-34e9-45f6-b2a1-309455c342b1 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "ef76c5ad-176d-44c0-891a-66333b42d0ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.992243] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.009604] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aaaf678d-76c9-4d6a-9f84-d2c3f5bd1cdb tempest-ListImageFiltersTestJSON-995645754 tempest-ListImageFiltersTestJSON-995645754-project-member] Acquiring lock "a06c8cb4-ac81-44bf-bf6b-fa6c75fcb369" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.009844] 
env[62507]: DEBUG oslo_concurrency.lockutils [None req-aaaf678d-76c9-4d6a-9f84-d2c3f5bd1cdb tempest-ListImageFiltersTestJSON-995645754 tempest-ListImageFiltersTestJSON-995645754-project-member] Lock "a06c8cb4-ac81-44bf-bf6b-fa6c75fcb369" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.895864] env[62507]: WARNING oslo_vmware.rw_handles [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1176.895864] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1176.895864] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1176.895864] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1176.895864] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1176.895864] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1176.895864] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1176.895864] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1176.895864] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1176.895864] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1176.895864] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1176.895864] env[62507]: ERROR oslo_vmware.rw_handles [ 1176.896512] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/29877332-0289-4618-99a8-5d9bc8fe8f55/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1176.898466] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1176.898722] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Copying Virtual Disk [datastore2] vmware_temp/29877332-0289-4618-99a8-5d9bc8fe8f55/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/29877332-0289-4618-99a8-5d9bc8fe8f55/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1176.899013] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-2b01265a-f33a-426c-90b5-2ca8199400b6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.906598] env[62507]: DEBUG oslo_vmware.api [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Waiting for the task: (returnval){ [ 1176.906598] env[62507]: value = "task-2460024" [ 1176.906598] env[62507]: _type = "Task" [ 1176.906598] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.914530] env[62507]: DEBUG oslo_vmware.api [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Task: {'id': task-2460024, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.417735] env[62507]: DEBUG oslo_vmware.exceptions [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1177.418090] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1177.418679] env[62507]: ERROR nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1177.418679] env[62507]: Faults: ['InvalidArgument'] [ 1177.418679] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Traceback (most recent call last): [ 1177.418679] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1177.418679] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] yield resources [ 1177.418679] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1177.418679] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] self.driver.spawn(context, instance, image_meta, [ 1177.418679] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1177.418679] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1177.418679] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1177.418679] env[62507]: ERROR nova.compute.manager 
[instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] self._fetch_image_if_missing(context, vi) [ 1177.418679] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1177.419087] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] image_cache(vi, tmp_image_ds_loc) [ 1177.419087] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1177.419087] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] vm_util.copy_virtual_disk( [ 1177.419087] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1177.419087] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] session._wait_for_task(vmdk_copy_task) [ 1177.419087] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1177.419087] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] return self.wait_for_task(task_ref) [ 1177.419087] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1177.419087] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] return evt.wait() [ 1177.419087] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1177.419087] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] result = hub.switch() [ 1177.419087] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1177.419087] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] return self.greenlet.switch() [ 1177.419570] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1177.419570] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] self.f(*self.args, **self.kw) [ 1177.419570] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1177.419570] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] raise exceptions.translate_fault(task_info.error) [ 1177.419570] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1177.419570] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Faults: ['InvalidArgument'] [ 1177.419570] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] [ 1177.419570] env[62507]: INFO 
nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Terminating instance [ 1177.420571] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.420784] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1177.421062] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cccdfab-d3dd-41e8-97c0-cdd419ff215d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.423379] env[62507]: DEBUG nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1177.423569] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1177.424321] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd2b9f7-8364-4e2c-a9e8-5c4267bbc77e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.431226] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1177.431456] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cefb6e14-713f-4e64-a161-69ca3a28a480 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.433519] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1177.433694] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1177.434610] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4d60520-f244-45dc-a7b0-de5be3d03d2b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.440023] env[62507]: DEBUG oslo_vmware.api [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Waiting for the task: (returnval){ [ 1177.440023] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]522a40ab-2833-2b3c-8cf0-44242f8d306c" [ 1177.440023] env[62507]: _type = "Task" [ 1177.440023] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.453584] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1177.453832] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Creating directory with path [datastore2] vmware_temp/4fe9b19f-576a-4872-ac8a-e179f333d38d/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1177.454062] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-186132a5-c310-45fb-9a97-a66c765c44d8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.474442] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Created directory with path [datastore2] vmware_temp/4fe9b19f-576a-4872-ac8a-e179f333d38d/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1177.474718] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Fetch image to [datastore2] vmware_temp/4fe9b19f-576a-4872-ac8a-e179f333d38d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1177.474939] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/4fe9b19f-576a-4872-ac8a-e179f333d38d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1177.475845] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-91aecd4c-5c04-410a-8755-f6ac9e6a7a3f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.483021] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ad0067-24e0-43a7-9b69-b562e3846b76 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.492133] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f50d0d5-bde3-4116-9595-45b0e94060de {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.497450] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1177.497799] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1177.498059] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Deleting the datastore file [datastore2] 498b6bd7-03d8-44e7-b007-27d86afcb028 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1177.498643] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9313bfa2-ebe7-4578-adc0-4ce11bc3b3ed {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.527386] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94734681-99f8-402d-969d-1ac0314d4510 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.530038] env[62507]: DEBUG oslo_vmware.api [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Waiting for the task: (returnval){ [ 1177.530038] env[62507]: value = "task-2460026" [ 1177.530038] env[62507]: _type = "Task" [ 1177.530038] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.535115] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5604955f-9b26-40a2-ba92-1b1530562ebf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.539357] env[62507]: DEBUG oslo_vmware.api [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Task: {'id': task-2460026, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.558407] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1177.694880] env[62507]: DEBUG oslo_vmware.rw_handles [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4fe9b19f-576a-4872-ac8a-e179f333d38d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1177.757714] env[62507]: DEBUG oslo_vmware.rw_handles [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1177.757969] env[62507]: DEBUG oslo_vmware.rw_handles [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4fe9b19f-576a-4872-ac8a-e179f333d38d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1178.040816] env[62507]: DEBUG oslo_vmware.api [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Task: {'id': task-2460026, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07087} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.041190] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1178.041276] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1178.041453] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1178.041628] env[62507]: INFO nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1178.044199] env[62507]: DEBUG nova.compute.claims [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1178.044382] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.044597] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.377089] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bca4ed-773d-4f41-a610-5f719fd431c4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.384640] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1faa47c-132d-4939-8be4-a03150b0b16f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.414084] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5137b9-4a8b-4617-a297-a2af171db4b5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.420970] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3357ea51-ab81-43ed-af8b-3d3ea164f1fd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.433748] env[62507]: DEBUG nova.compute.provider_tree [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1178.442388] env[62507]: DEBUG nova.scheduler.client.report [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1178.458185] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.412s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.458185] env[62507]: ERROR nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1178.458185] env[62507]: Faults: ['InvalidArgument'] [ 1178.458185] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Traceback (most recent call last): [ 1178.458185] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1178.458185] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] self.driver.spawn(context, instance, image_meta, [ 1178.458185] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1178.458185] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1178.458185] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1178.458185] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] self._fetch_image_if_missing(context, vi) [ 1178.458582] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1178.458582] env[62507]: ERROR nova.compute.manager [instance: 
498b6bd7-03d8-44e7-b007-27d86afcb028] image_cache(vi, tmp_image_ds_loc) [ 1178.458582] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1178.458582] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] vm_util.copy_virtual_disk( [ 1178.458582] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1178.458582] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] session._wait_for_task(vmdk_copy_task) [ 1178.458582] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1178.458582] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] return self.wait_for_task(task_ref) [ 1178.458582] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1178.458582] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] return evt.wait() [ 1178.458582] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1178.458582] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] result = hub.switch() [ 1178.458582] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1178.458941] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] return self.greenlet.switch() [ 1178.458941] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1178.458941] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] self.f(*self.args, **self.kw) [ 1178.458941] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1178.458941] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] raise exceptions.translate_fault(task_info.error) [ 1178.458941] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1178.458941] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Faults: ['InvalidArgument'] [ 1178.458941] env[62507]: ERROR nova.compute.manager [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] [ 1178.458941] env[62507]: DEBUG nova.compute.utils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1178.459893] env[62507]: DEBUG 
nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Build of instance 498b6bd7-03d8-44e7-b007-27d86afcb028 was re-scheduled: A specified parameter was not correct: fileType [ 1178.459893] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1178.460274] env[62507]: DEBUG nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1178.460450] env[62507]: DEBUG nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1178.460623] env[62507]: DEBUG nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1178.460829] env[62507]: DEBUG nova.network.neutron [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1178.959285] env[62507]: DEBUG nova.network.neutron [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.978704] env[62507]: INFO nova.compute.manager [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Took 0.52 seconds to deallocate network for instance. 
[ 1179.093683] env[62507]: INFO nova.scheduler.client.report [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Deleted allocations for instance 498b6bd7-03d8-44e7-b007-27d86afcb028 [ 1179.117985] env[62507]: DEBUG oslo_concurrency.lockutils [None req-af0f1cc0-e933-4ff3-818e-94299715a407 tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "498b6bd7-03d8-44e7-b007-27d86afcb028" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 610.603s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.119400] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4706e1b1-54e0-485e-881e-f572ab22629b tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "498b6bd7-03d8-44e7-b007-27d86afcb028" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 413.290s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.119820] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4706e1b1-54e0-485e-881e-f572ab22629b tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Acquiring lock "498b6bd7-03d8-44e7-b007-27d86afcb028-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.120175] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4706e1b1-54e0-485e-881e-f572ab22629b tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "498b6bd7-03d8-44e7-b007-27d86afcb028-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.120504] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4706e1b1-54e0-485e-881e-f572ab22629b tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "498b6bd7-03d8-44e7-b007-27d86afcb028-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.122755] env[62507]: INFO nova.compute.manager [None req-4706e1b1-54e0-485e-881e-f572ab22629b tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Terminating instance [ 1179.124572] env[62507]: DEBUG nova.compute.manager [None req-4706e1b1-54e0-485e-881e-f572ab22629b tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Start destroying the instance on the hypervisor. 
{{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1179.124768] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-4706e1b1-54e0-485e-881e-f572ab22629b tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1179.125279] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-31186d78-e33d-4c94-aa1e-071c620c9e85 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.130647] env[62507]: DEBUG nova.compute.manager [None req-9def2483-ef27-449f-a3aa-911eed733f41 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: dcf96348-5199-4c3f-9661-5ac0924c5b96] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1179.137361] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439bc91d-6bf4-4e7b-b642-b33547ed86cd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.166337] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-4706e1b1-54e0-485e-881e-f572ab22629b tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 498b6bd7-03d8-44e7-b007-27d86afcb028 could not be found. [ 1179.166557] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-4706e1b1-54e0-485e-881e-f572ab22629b tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1179.166740] env[62507]: INFO nova.compute.manager [None req-4706e1b1-54e0-485e-881e-f572ab22629b tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1179.167056] env[62507]: DEBUG oslo.service.loopingcall [None req-4706e1b1-54e0-485e-881e-f572ab22629b tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1179.167329] env[62507]: DEBUG nova.compute.manager [-] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1179.167427] env[62507]: DEBUG nova.network.neutron [-] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1179.181374] env[62507]: DEBUG nova.compute.manager [None req-9def2483-ef27-449f-a3aa-911eed733f41 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: dcf96348-5199-4c3f-9661-5ac0924c5b96] Instance disappeared before build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1179.193010] env[62507]: DEBUG nova.network.neutron [-] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.200587] env[62507]: INFO nova.compute.manager [-] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] Took 0.03 seconds to deallocate network for instance. [ 1179.205574] env[62507]: DEBUG oslo_concurrency.lockutils [None req-9def2483-ef27-449f-a3aa-911eed733f41 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "dcf96348-5199-4c3f-9661-5ac0924c5b96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.901s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.213826] env[62507]: DEBUG nova.compute.manager [None req-85e399cb-eacf-418f-a4b1-f6ea361ab803 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 9da52346-c500-4335-8f4c-39cf56322589] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1179.245058] env[62507]: DEBUG nova.compute.manager [None req-85e399cb-eacf-418f-a4b1-f6ea361ab803 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] [instance: 9da52346-c500-4335-8f4c-39cf56322589] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1179.266061] env[62507]: DEBUG oslo_concurrency.lockutils [None req-85e399cb-eacf-418f-a4b1-f6ea361ab803 tempest-SecurityGroupsTestJSON-1700924672 tempest-SecurityGroupsTestJSON-1700924672-project-member] Lock "9da52346-c500-4335-8f4c-39cf56322589" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.867s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.277759] env[62507]: DEBUG nova.compute.manager [None req-57ba5bb7-7a1c-4746-a7c7-a42f35068b52 tempest-ImagesNegativeTestJSON-1013885382 tempest-ImagesNegativeTestJSON-1013885382-project-member] [instance: b0e3f575-9a49-4bb0-af5f-58bdb5ca0aa5] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1179.301270] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4706e1b1-54e0-485e-881e-f572ab22629b tempest-ServersAdminTestJSON-1645104492 tempest-ServersAdminTestJSON-1645104492-project-member] Lock "498b6bd7-03d8-44e7-b007-27d86afcb028" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.302091] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "498b6bd7-03d8-44e7-b007-27d86afcb028" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 67.883s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.302372] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 498b6bd7-03d8-44e7-b007-27d86afcb028] During sync_power_state the instance has a pending task (deleting). Skip. [ 1179.302505] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "498b6bd7-03d8-44e7-b007-27d86afcb028" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.303336] env[62507]: DEBUG nova.compute.manager [None req-57ba5bb7-7a1c-4746-a7c7-a42f35068b52 tempest-ImagesNegativeTestJSON-1013885382 tempest-ImagesNegativeTestJSON-1013885382-project-member] [instance: b0e3f575-9a49-4bb0-af5f-58bdb5ca0aa5] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1179.322292] env[62507]: DEBUG oslo_concurrency.lockutils [None req-57ba5bb7-7a1c-4746-a7c7-a42f35068b52 tempest-ImagesNegativeTestJSON-1013885382 tempest-ImagesNegativeTestJSON-1013885382-project-member] Lock "b0e3f575-9a49-4bb0-af5f-58bdb5ca0aa5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.422s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.330045] env[62507]: DEBUG nova.compute.manager [None req-7617d66a-5f99-416c-a17b-4d4dcdc6b99d tempest-ServerPasswordTestJSON-239737907 tempest-ServerPasswordTestJSON-239737907-project-member] [instance: 9e1c954f-3a25-46f8-a34b-9fa859053951] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1179.352454] env[62507]: DEBUG nova.compute.manager [None req-7617d66a-5f99-416c-a17b-4d4dcdc6b99d tempest-ServerPasswordTestJSON-239737907 tempest-ServerPasswordTestJSON-239737907-project-member] [instance: 9e1c954f-3a25-46f8-a34b-9fa859053951] Instance disappeared before build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1179.374627] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7617d66a-5f99-416c-a17b-4d4dcdc6b99d tempest-ServerPasswordTestJSON-239737907 tempest-ServerPasswordTestJSON-239737907-project-member] Lock "9e1c954f-3a25-46f8-a34b-9fa859053951" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.061s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.383176] env[62507]: DEBUG nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1179.435953] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.436243] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.437908] env[62507]: INFO nova.compute.claims [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1179.826752] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f9085f-492b-47c0-94f4-ccd42f009350 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.836028] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ff6c88-ebe8-4f32-9e45-1ffbd46f64dc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.864453] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c471e6b2-b27e-42f5-a895-97579ced4c87 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.871471] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3d4016-3cb8-4613-a940-b9cc3c5f6556 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.884030] env[62507]: DEBUG nova.compute.provider_tree [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1179.892598] env[62507]: DEBUG nova.scheduler.client.report 
[None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1179.906543] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.470s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.907033] env[62507]: DEBUG nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1179.942411] env[62507]: DEBUG nova.compute.utils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1179.944136] env[62507]: DEBUG nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1179.944253] env[62507]: DEBUG nova.network.neutron [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1179.954456] env[62507]: DEBUG nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Start building block device mappings for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1180.010556] env[62507]: DEBUG nova.policy [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a05f77d144740b0a37ca55fe163a511', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '24f3f26978fa490fa5fe8dcd8573c61e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1180.016159] env[62507]: DEBUG nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1180.041750] env[62507]: DEBUG nova.virt.hardware [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1180.042026] env[62507]: DEBUG nova.virt.hardware [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1180.042219] env[62507]: DEBUG nova.virt.hardware [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1180.042406] env[62507]: DEBUG nova.virt.hardware [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1180.042553] env[62507]: DEBUG nova.virt.hardware [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1180.042699] env[62507]: DEBUG nova.virt.hardware [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 
tempest-ImagesTestJSON-1935977095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1180.042906] env[62507]: DEBUG nova.virt.hardware [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1180.043083] env[62507]: DEBUG nova.virt.hardware [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1180.043257] env[62507]: DEBUG nova.virt.hardware [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1180.043462] env[62507]: DEBUG nova.virt.hardware [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1180.043653] env[62507]: DEBUG nova.virt.hardware [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1180.044528] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a5ff17-188f-4251-89eb-b6402c7535e2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.052788] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22352322-bae1-44cb-9b9a-8d56a2e10e61 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.385778] env[62507]: DEBUG nova.network.neutron [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Successfully created port: 3022bb04-aaf4-4b19-af8d-94bbbfabbf9f {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1181.091108] env[62507]: DEBUG nova.compute.manager [req-05433d65-3060-46cc-bf15-b80096b781d5 req-23ac8e1a-9a44-4235-90fb-615839c31712 service nova] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Received event network-vif-plugged-3022bb04-aaf4-4b19-af8d-94bbbfabbf9f {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1181.091344] env[62507]: DEBUG oslo_concurrency.lockutils [req-05433d65-3060-46cc-bf15-b80096b781d5 req-23ac8e1a-9a44-4235-90fb-615839c31712 service nova] Acquiring lock "3627bbf7-507f-4345-b093-3b4f5bb45eae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1181.091559] env[62507]: DEBUG oslo_concurrency.lockutils [req-05433d65-3060-46cc-bf15-b80096b781d5 req-23ac8e1a-9a44-4235-90fb-615839c31712 service nova] Lock "3627bbf7-507f-4345-b093-3b4f5bb45eae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1181.091756] env[62507]: DEBUG oslo_concurrency.lockutils [req-05433d65-3060-46cc-bf15-b80096b781d5 req-23ac8e1a-9a44-4235-90fb-615839c31712 service nova] Lock "3627bbf7-507f-4345-b093-3b4f5bb45eae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.091918] env[62507]: DEBUG nova.compute.manager [req-05433d65-3060-46cc-bf15-b80096b781d5 req-23ac8e1a-9a44-4235-90fb-615839c31712 service nova] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] No waiting events found dispatching network-vif-plugged-3022bb04-aaf4-4b19-af8d-94bbbfabbf9f {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1181.092343] env[62507]: WARNING nova.compute.manager [req-05433d65-3060-46cc-bf15-b80096b781d5 req-23ac8e1a-9a44-4235-90fb-615839c31712 service nova] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Received unexpected event network-vif-plugged-3022bb04-aaf4-4b19-af8d-94bbbfabbf9f for instance with vm_state building and task_state spawning. [ 1181.187597] env[62507]: DEBUG nova.network.neutron [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Successfully updated port: 3022bb04-aaf4-4b19-af8d-94bbbfabbf9f {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1181.195516] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "refresh_cache-3627bbf7-507f-4345-b093-3b4f5bb45eae" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1181.195840] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquired lock "refresh_cache-3627bbf7-507f-4345-b093-3b4f5bb45eae" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.196088] env[62507]: DEBUG nova.network.neutron [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1181.239956] env[62507]: DEBUG nova.network.neutron [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1181.645026] env[62507]: DEBUG nova.network.neutron [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Updating instance_info_cache with network_info: [{"id": "3022bb04-aaf4-4b19-af8d-94bbbfabbf9f", "address": "fa:16:3e:53:e4:6a", "network": {"id": "cd916907-36a3-476b-86b5-4b5b489d0b84", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2065590662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24f3f26978fa490fa5fe8dcd8573c61e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3022bb04-aa", "ovs_interfaceid": "3022bb04-aaf4-4b19-af8d-94bbbfabbf9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.661386] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Releasing lock "refresh_cache-3627bbf7-507f-4345-b093-3b4f5bb45eae" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1181.663052] env[62507]: DEBUG nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Instance network_info: |[{"id": "3022bb04-aaf4-4b19-af8d-94bbbfabbf9f", "address": "fa:16:3e:53:e4:6a", "network": {"id": "cd916907-36a3-476b-86b5-4b5b489d0b84", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2065590662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24f3f26978fa490fa5fe8dcd8573c61e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3022bb04-aa", "ovs_interfaceid": "3022bb04-aaf4-4b19-af8d-94bbbfabbf9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1181.663481] env[62507]: DEBUG 
nova.virt.vmwareapi.vmops [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:e4:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d31a554-a94c-4471-892f-f65aa87b8279', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3022bb04-aaf4-4b19-af8d-94bbbfabbf9f', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1181.670852] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Creating folder: Project (24f3f26978fa490fa5fe8dcd8573c61e). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1181.671397] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a7d58e9-8f23-47e0-839a-873d5ab8c208 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.682420] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Created folder: Project (24f3f26978fa490fa5fe8dcd8573c61e) in parent group-v497991. [ 1181.682607] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Creating folder: Instances. Parent ref: group-v498064. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1181.682833] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a45d891-8c75-4b3a-8460-179913df227c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.691669] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Created folder: Instances in parent group-v498064. [ 1181.691784] env[62507]: DEBUG oslo.service.loopingcall [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1181.691948] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1181.692168] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-222531bc-dac3-44cb-a6e4-a4f87af21fc8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.711040] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1181.711040] env[62507]: value = "task-2460029" [ 1181.711040] env[62507]: _type = "Task" [ 1181.711040] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.718262] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460029, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.221065] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460029, 'name': CreateVM_Task, 'duration_secs': 0.289805} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.221216] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1182.227531] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1182.227694] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.228020] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1182.228289] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40022433-08d9-40ec-9469-691936a4027c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.232593] env[62507]: DEBUG oslo_vmware.api [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for the task: (returnval){ [ 1182.232593] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]520a761a-73e1-535a-d146-963ceb958d8c" [ 1182.232593] env[62507]: _type = "Task" [ 1182.232593] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.239850] env[62507]: DEBUG oslo_vmware.api [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]520a761a-73e1-535a-d146-963ceb958d8c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.744953] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.744953] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1182.744953] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1183.153976] env[62507]: DEBUG nova.compute.manager [req-f8eaaa19-f2d0-4350-b499-7352f74b929b req-34862543-9dc8-4a01-b23c-f159bd1222b2 service nova] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Received event network-changed-3022bb04-aaf4-4b19-af8d-94bbbfabbf9f {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1183.154206] env[62507]: DEBUG nova.compute.manager [req-f8eaaa19-f2d0-4350-b499-7352f74b929b req-34862543-9dc8-4a01-b23c-f159bd1222b2 service nova] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Refreshing instance network info cache due to event network-changed-3022bb04-aaf4-4b19-af8d-94bbbfabbf9f. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1183.154431] env[62507]: DEBUG oslo_concurrency.lockutils [req-f8eaaa19-f2d0-4350-b499-7352f74b929b req-34862543-9dc8-4a01-b23c-f159bd1222b2 service nova] Acquiring lock "refresh_cache-3627bbf7-507f-4345-b093-3b4f5bb45eae" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1183.154541] env[62507]: DEBUG oslo_concurrency.lockutils [req-f8eaaa19-f2d0-4350-b499-7352f74b929b req-34862543-9dc8-4a01-b23c-f159bd1222b2 service nova] Acquired lock "refresh_cache-3627bbf7-507f-4345-b093-3b4f5bb45eae" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.154706] env[62507]: DEBUG nova.network.neutron [req-f8eaaa19-f2d0-4350-b499-7352f74b929b req-34862543-9dc8-4a01-b23c-f159bd1222b2 service nova] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Refreshing network info cache for port 3022bb04-aaf4-4b19-af8d-94bbbfabbf9f {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1183.456435] env[62507]: DEBUG nova.network.neutron [req-f8eaaa19-f2d0-4350-b499-7352f74b929b req-34862543-9dc8-4a01-b23c-f159bd1222b2 service nova] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Updated VIF entry in instance network info cache for port 3022bb04-aaf4-4b19-af8d-94bbbfabbf9f. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1183.456789] env[62507]: DEBUG nova.network.neutron [req-f8eaaa19-f2d0-4350-b499-7352f74b929b req-34862543-9dc8-4a01-b23c-f159bd1222b2 service nova] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Updating instance_info_cache with network_info: [{"id": "3022bb04-aaf4-4b19-af8d-94bbbfabbf9f", "address": "fa:16:3e:53:e4:6a", "network": {"id": "cd916907-36a3-476b-86b5-4b5b489d0b84", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2065590662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24f3f26978fa490fa5fe8dcd8573c61e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3022bb04-aa", "ovs_interfaceid": "3022bb04-aaf4-4b19-af8d-94bbbfabbf9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.466576] env[62507]: DEBUG oslo_concurrency.lockutils [req-f8eaaa19-f2d0-4350-b499-7352f74b929b req-34862543-9dc8-4a01-b23c-f159bd1222b2 service nova] Releasing lock "refresh_cache-3627bbf7-507f-4345-b093-3b4f5bb45eae" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.893165] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ca923de-c62f-43a0-a3a9-20f68db3e0a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "3627bbf7-507f-4345-b093-3b4f5bb45eae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.358923] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.358923] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.276501] env[62507]: DEBUG oslo_concurrency.lockutils [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] Acquiring lock "df12d1b1-3c2a-47f8-b8df-d9993acf8d82" 
by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.276914] env[62507]: DEBUG oslo_concurrency.lockutils [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] Lock "df12d1b1-3c2a-47f8-b8df-d9993acf8d82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.306999] env[62507]: DEBUG oslo_concurrency.lockutils [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] Acquiring lock "fb7d0d04-1c97-40e1-824c-25d04f87e468" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.307303] env[62507]: DEBUG oslo_concurrency.lockutils [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] Lock "fb7d0d04-1c97-40e1-824c-25d04f87e468" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.343227] env[62507]: DEBUG oslo_concurrency.lockutils [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] Acquiring lock "955e2c90-e317-4148-887d-e9a4eacdda2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.343357] env[62507]: DEBUG oslo_concurrency.lockutils [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] Lock "955e2c90-e317-4148-887d-e9a4eacdda2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.163611] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.167695] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.803279] env[62507]: WARNING oslo_vmware.rw_handles [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection 
without response [ 1224.803279] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1224.803279] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1224.803279] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1224.803279] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1224.803279] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1224.803279] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1224.803279] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1224.803279] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1224.803279] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1224.803279] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1224.803279] env[62507]: ERROR oslo_vmware.rw_handles [ 1224.803904] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/4fe9b19f-576a-4872-ac8a-e179f333d38d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1224.805739] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1224.805982] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Copying Virtual Disk [datastore2] vmware_temp/4fe9b19f-576a-4872-ac8a-e179f333d38d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/4fe9b19f-576a-4872-ac8a-e179f333d38d/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1224.806295] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45df7016-3b96-4c9b-8e94-9313eb8a04b3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.814690] env[62507]: DEBUG oslo_vmware.api [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Waiting for the task: (returnval){ [ 1224.814690] env[62507]: value = "task-2460030" [ 1224.814690] env[62507]: _type = "Task" [ 1224.814690] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.822381] env[62507]: DEBUG oslo_vmware.api [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Task: {'id': task-2460030, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.162788] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.325411] env[62507]: DEBUG oslo_vmware.exceptions [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1225.325656] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1225.326357] env[62507]: ERROR nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1225.326357] env[62507]: Faults: ['InvalidArgument'] [ 1225.326357] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Traceback (most recent call last): [ 1225.326357] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1225.326357] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] yield resources [ 1225.326357] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1225.326357] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] self.driver.spawn(context, instance, image_meta, [ 1225.326357] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1225.326357] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1225.326357] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1225.326357] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] self._fetch_image_if_missing(context, vi) [ 1225.326357] env[62507]: ERROR nova.compute.manager [instance: 
2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1225.326775] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] image_cache(vi, tmp_image_ds_loc) [ 1225.326775] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1225.326775] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] vm_util.copy_virtual_disk( [ 1225.326775] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1225.326775] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] session._wait_for_task(vmdk_copy_task) [ 1225.326775] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1225.326775] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] return self.wait_for_task(task_ref) [ 1225.326775] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1225.326775] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] return evt.wait() [ 1225.326775] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1225.326775] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] result = hub.switch() [ 1225.326775] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1225.326775] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] return self.greenlet.switch() [ 1225.327165] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1225.327165] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] self.f(*self.args, **self.kw) [ 1225.327165] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1225.327165] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] raise exceptions.translate_fault(task_info.error) [ 1225.327165] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1225.327165] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Faults: ['InvalidArgument'] [ 1225.327165] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] [ 1225.327165] env[62507]: INFO nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 
tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Terminating instance [ 1225.328529] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.328743] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1225.329088] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62d41f48-5aeb-4860-91a8-d242776c835b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.331609] env[62507]: DEBUG nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1225.331806] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1225.332729] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c64c63-fa75-41e1-b0ad-b57156fc1860 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.340475] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1225.340764] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e151550-0c4d-4300-b49c-6c2b8dcd4acf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.343388] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1225.343610] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1225.344637] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83cf9094-3813-4b06-a7ac-7e7968c6d61e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.350124] env[62507]: DEBUG oslo_vmware.api [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Waiting for the task: (returnval){ [ 1225.350124] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e2d3bb-1294-4cd0-2232-368ece6866c8" [ 1225.350124] env[62507]: _type = "Task" [ 1225.350124] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.358594] env[62507]: DEBUG oslo_vmware.api [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e2d3bb-1294-4cd0-2232-368ece6866c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.415666] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1225.415905] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1225.416105] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Deleting the datastore file [datastore2] 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1225.416372] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58fc6039-085a-48fe-a7c4-43a78c48d310 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.423147] env[62507]: DEBUG oslo_vmware.api [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Waiting for the task: (returnval){ [ 1225.423147] env[62507]: value = "task-2460032" [ 1225.423147] env[62507]: _type = "Task" [ 1225.423147] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.431618] env[62507]: DEBUG oslo_vmware.api [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Task: {'id': task-2460032, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.860727] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1225.861119] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Creating directory with path [datastore2] vmware_temp/649c9aac-9a51-4344-b30e-939c53de26ca/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1225.861170] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6900c595-3b26-45f1-846b-d240d1eb851c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.873346] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Created directory with path [datastore2] vmware_temp/649c9aac-9a51-4344-b30e-939c53de26ca/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1225.873614] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Fetch image to [datastore2] vmware_temp/649c9aac-9a51-4344-b30e-939c53de26ca/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1225.873808] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/649c9aac-9a51-4344-b30e-939c53de26ca/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1225.874597] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a70df8-f8f9-4303-9caa-09dfaef811d0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.881564] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629bcf85-cbee-4512-91da-5012b533eb4f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.891102] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc24d05c-5580-4c65-9847-f0d8af94ece9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.923270] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487408f2-329d-48ae-9d6f-981a5f83c2ad {{(pid=62507) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.935293] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ea314331-d9aa-482f-9343-05290bfe544e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.937162] env[62507]: DEBUG oslo_vmware.api [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Task: {'id': task-2460032, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076613} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.937405] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1225.937612] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1225.937792] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1225.937969] env[62507]: INFO nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1225.943861] env[62507]: DEBUG nova.compute.claims [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1225.944051] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.944273] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.960718] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1226.168409] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1226.190049] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1226.191702] env[62507]: ERROR nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. 
[ 1226.191702] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Traceback (most recent call last): [ 1226.191702] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1226.191702] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1226.191702] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1226.191702] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] result = getattr(controller, method)(*args, **kwargs) [ 1226.191702] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1226.191702] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self._get(image_id) [ 1226.191702] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1226.191702] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1226.191702] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1226.192063] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] resp, body = self.http_client.get(url, headers=header) [ 1226.192063] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1226.192063] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self.request(url, 'GET', **kwargs) [ 1226.192063] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1226.192063] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self._handle_response(resp) [ 1226.192063] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1226.192063] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise exc.from_response(resp, resp.content) [ 1226.192063] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1226.192063] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1226.192063] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] During handling of the above exception, another exception occurred: [ 1226.192063] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1226.192063] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Traceback (most recent call last): [ 1226.192448] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1226.192448] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] yield resources [ 1226.192448] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1226.192448] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self.driver.spawn(context, instance, image_meta, [ 1226.192448] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1226.192448] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1226.192448] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1226.192448] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._fetch_image_if_missing(context, vi) [ 1226.192448] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1226.192448] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] image_fetch(context, vi, tmp_image_ds_loc) [ 1226.192448] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1226.192448] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] images.fetch_image( [ 1226.192448] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1226.192882] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] metadata = IMAGE_API.get(context, image_ref) [ 1226.192882] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1226.192882] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return session.show(context, image_id, [ 1226.192882] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1226.192882] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] _reraise_translated_image_exception(image_id) [ 1226.192882] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1226.192882] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise new_exc.with_traceback(exc_trace) [ 1226.192882] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1226.192882] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1226.192882] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1226.192882] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] result = getattr(controller, method)(*args, **kwargs) [ 1226.192882] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1226.192882] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self._get(image_id) [ 1226.193317] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1226.193317] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1226.193317] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1226.193317] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] resp, body = self.http_client.get(url, headers=header) [ 1226.193317] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1226.193317] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self.request(url, 'GET', **kwargs) [ 1226.193317] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1226.193317] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self._handle_response(resp) [ 1226.193317] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1226.193317] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise exc.from_response(resp, resp.content) [ 1226.193317] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] nova.exception.ImageNotAuthorized: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. 
[ 1226.193317] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1226.193654] env[62507]: INFO nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Terminating instance [ 1226.193654] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.193797] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1226.194479] env[62507]: DEBUG nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1226.194678] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1226.194911] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4abff778-d7c5-4bf3-a48a-4c57dec2d1c8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.198070] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0ec31b-68fe-40e9-ae18-022c38f55720 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.207740] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1226.207993] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9261ecf-c2aa-4743-a589-ddb68f83b518 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.210478] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1226.210681] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 
tempest-InstanceActionsTestJSON-1144674175-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1226.211621] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41fe5f99-85f5-4e4d-8337-04874e05d50a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.219103] env[62507]: DEBUG oslo_vmware.api [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Waiting for the task: (returnval){ [ 1226.219103] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5240dcae-c024-c319-4121-7db6d01e0de3" [ 1226.219103] env[62507]: _type = "Task" [ 1226.219103] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.227799] env[62507]: DEBUG oslo_vmware.api [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5240dcae-c024-c319-4121-7db6d01e0de3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.279945] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1226.280190] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1226.280378] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Deleting the datastore file [datastore2] 4a3639c7-8795-4702-a729-8239b0d55d51 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1226.280798] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2477ebc3-d35b-421f-aa45-f9471e9c033f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.290213] env[62507]: DEBUG oslo_vmware.api [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Waiting for the task: (returnval){ [ 1226.290213] env[62507]: value = "task-2460034" [ 1226.290213] env[62507]: _type = "Task" [ 1226.290213] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.298603] env[62507]: DEBUG oslo_vmware.api [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Task: {'id': task-2460034, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.351837] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59dba90a-7656-47bb-bf4f-d465a9f45fdd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.359552] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ace104-a4a1-49e5-8f9e-33fa4dcad6a3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.392761] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc92ffa7-3edb-48ae-b512-5c3248b7c70e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.400754] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d853842f-8091-45b4-b952-8236fa127432 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.413886] env[62507]: DEBUG nova.compute.provider_tree [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1226.422246] env[62507]: DEBUG nova.scheduler.client.report [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1226.440033] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.496s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.440636] env[62507]: ERROR nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 
1226.440636] env[62507]: Faults: ['InvalidArgument'] [ 1226.440636] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Traceback (most recent call last): [ 1226.440636] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1226.440636] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] self.driver.spawn(context, instance, image_meta, [ 1226.440636] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1226.440636] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1226.440636] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1226.440636] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] self._fetch_image_if_missing(context, vi) [ 1226.440636] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1226.440636] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] image_cache(vi, tmp_image_ds_loc) [ 1226.440636] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1226.441030] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] vm_util.copy_virtual_disk( [ 1226.441030] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1226.441030] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] session._wait_for_task(vmdk_copy_task) [ 1226.441030] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1226.441030] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] return self.wait_for_task(task_ref) [ 1226.441030] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1226.441030] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] return evt.wait() [ 1226.441030] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1226.441030] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] result = hub.switch() [ 1226.441030] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1226.441030] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] return self.greenlet.switch() [ 1226.441030] env[62507]: ERROR nova.compute.manager [instance: 
2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1226.441030] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] self.f(*self.args, **self.kw) [ 1226.441744] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1226.441744] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] raise exceptions.translate_fault(task_info.error) [ 1226.441744] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1226.441744] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Faults: ['InvalidArgument'] [ 1226.441744] env[62507]: ERROR nova.compute.manager [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] [ 1226.441744] env[62507]: DEBUG nova.compute.utils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1226.442884] env[62507]: DEBUG nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Build of instance 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 was re-scheduled: A specified parameter was not correct: fileType [ 1226.442884] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1226.443280] env[62507]: DEBUG nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1226.443459] env[62507]: DEBUG nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1226.443636] env[62507]: DEBUG nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1226.443804] env[62507]: DEBUG nova.network.neutron [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1226.729847] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1226.730137] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Creating directory with path [datastore2] vmware_temp/7ad9689b-faa9-4b25-87fe-ab9cae556f9f/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1226.730385] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-531d16b8-08eb-4126-89db-57d660c4d999 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.742814] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Created directory with path [datastore2] vmware_temp/7ad9689b-faa9-4b25-87fe-ab9cae556f9f/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1226.743031] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Fetch image to [datastore2] vmware_temp/7ad9689b-faa9-4b25-87fe-ab9cae556f9f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1226.743219] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/7ad9689b-faa9-4b25-87fe-ab9cae556f9f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1226.743987] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9332de-8054-49a3-97ba-7d1c20b42988 {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.751074] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645f0f41-eb31-4194-9e57-ee9913d2e532 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.760160] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34d88a4-f894-4545-ba06-ac88d77de40a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.791053] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14211eee-74dd-4209-896f-5fda6da6d131 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.802138] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7f8b6204-2ca8-400c-b26c-4bcd300b3725 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.803823] env[62507]: DEBUG oslo_vmware.api [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Task: {'id': task-2460034, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077569} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.804099] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1226.804292] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1226.804508] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1226.804705] env[62507]: INFO nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Took 0.61 seconds to destroy the instance on the hypervisor. 
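[Annotation] The fileType/InvalidArgument traceback above walks the standard oslo.vmware pattern: start a CopyVirtualDisk_Task, then block on it with wait_for_task, which polls TaskInfo and translates a vCenter task error into VimFaultException. Below is a minimal standalone sketch of that pattern, not the Nova code itself; the vCenter endpoint, credentials, datastore paths, and dc_ref are placeholders.

    from oslo_vmware import api, exceptions

    # Placeholder endpoint/credentials; a real caller supplies its own.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    def copy_virtual_disk(source_path, dest_path, dc_ref):
        """Start CopyVirtualDisk_Task and wait for it, mirroring the
        vm_util.copy_virtual_disk -> session._wait_for_task chain above."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=source_path, sourceDatacenter=dc_ref,
                                  destName=dest_path, destDatacenter=dc_ref)
        try:
            # wait_for_task polls TaskInfo in a looping call; an 'error'
            # state is raised as VimFaultException (here: 'A specified
            # parameter was not correct: fileType', Faults: ['InvalidArgument']).
            return session.wait_for_task(task)
        except exceptions.VimFaultException as e:
            print('copy failed: %s (faults=%s)' % (e, e.fault_list))
            raise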
[ 1226.806969] env[62507]: DEBUG nova.compute.claims [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1226.807164] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.807367] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.824419] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1226.891061] env[62507]: DEBUG oslo_vmware.rw_handles [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7ad9689b-faa9-4b25-87fe-ab9cae556f9f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1226.956048] env[62507]: DEBUG oslo_vmware.rw_handles [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1226.956242] env[62507]: DEBUG oslo_vmware.rw_handles [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7ad9689b-faa9-4b25-87fe-ab9cae556f9f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1226.962054] env[62507]: DEBUG nova.network.neutron [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.974742] env[62507]: INFO nova.compute.manager [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Took 0.53 seconds to deallocate network for instance. [ 1227.075315] env[62507]: INFO nova.scheduler.client.report [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Deleted allocations for instance 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 [ 1227.097989] env[62507]: DEBUG oslo_concurrency.lockutils [None req-df464e22-c9cf-4709-942d-60b6e9502630 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 656.874s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.099166] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b3b91df7-7511-4f1b-b485-1bd575dbea67 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 459.663s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.099386] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b3b91df7-7511-4f1b-b485-1bd575dbea67 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Acquiring lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.099596] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b3b91df7-7511-4f1b-b485-1bd575dbea67 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.099868] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b3b91df7-7511-4f1b-b485-1bd575dbea67 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.102059] env[62507]: INFO 
nova.compute.manager [None req-b3b91df7-7511-4f1b-b485-1bd575dbea67 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Terminating instance [ 1227.104047] env[62507]: DEBUG nova.compute.manager [None req-b3b91df7-7511-4f1b-b485-1bd575dbea67 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1227.104302] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b3b91df7-7511-4f1b-b485-1bd575dbea67 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1227.104727] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e5bd701-21f0-4c37-8661-b16c04a7314c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.117581] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25aac07e-bb1a-4f57-89b4-2d03a997378b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.128638] env[62507]: DEBUG nova.compute.manager [None req-414f98db-771f-487b-99dc-05a2fed6786e tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: b86fd157-ec5a-4e61-967a-c7cdd86bfea1] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.152916] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-b3b91df7-7511-4f1b-b485-1bd575dbea67 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29 could not be found. [ 1227.153146] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-b3b91df7-7511-4f1b-b485-1bd575dbea67 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1227.153332] env[62507]: INFO nova.compute.manager [None req-b3b91df7-7511-4f1b-b485-1bd575dbea67 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1227.153580] env[62507]: DEBUG oslo.service.loopingcall [None req-b3b91df7-7511-4f1b-b485-1bd575dbea67 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1227.153809] env[62507]: DEBUG nova.compute.manager [-] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1227.153927] env[62507]: DEBUG nova.network.neutron [-] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1227.156515] env[62507]: DEBUG nova.compute.manager [None req-414f98db-771f-487b-99dc-05a2fed6786e tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: b86fd157-ec5a-4e61-967a-c7cdd86bfea1] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1227.166845] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.184469] env[62507]: DEBUG oslo_concurrency.lockutils [None req-414f98db-771f-487b-99dc-05a2fed6786e tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "b86fd157-ec5a-4e61-967a-c7cdd86bfea1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.691s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.187849] env[62507]: DEBUG nova.network.neutron [-] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1227.193410] env[62507]: DEBUG nova.compute.manager [None req-4b797e5c-45e9-4ae5-a441-60c74e701749 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] [instance: 2e7dfd95-dc72-4dd8-9602-dd1af3d330a0] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.198045] env[62507]: INFO nova.compute.manager [-] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] Took 0.04 seconds to deallocate network for instance. [ 1227.220644] env[62507]: DEBUG nova.compute.manager [None req-4b797e5c-45e9-4ae5-a441-60c74e701749 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] [instance: 2e7dfd95-dc72-4dd8-9602-dd1af3d330a0] Instance disappeared before build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1227.239921] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4b797e5c-45e9-4ae5-a441-60c74e701749 tempest-AttachInterfacesTestJSON-489314814 tempest-AttachInterfacesTestJSON-489314814-project-member] Lock "2e7dfd95-dc72-4dd8-9602-dd1af3d330a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.579s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.251443] env[62507]: DEBUG nova.compute.manager [None req-86683ef0-6915-4f80-8e2e-6f87664e8e9f tempest-ServersListShow296Test-1840877548 tempest-ServersListShow296Test-1840877548-project-member] [instance: ef130396-4736-4601-9024-6f562d5af828] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.274688] env[62507]: DEBUG nova.compute.manager [None req-86683ef0-6915-4f80-8e2e-6f87664e8e9f tempest-ServersListShow296Test-1840877548 tempest-ServersListShow296Test-1840877548-project-member] [instance: ef130396-4736-4601-9024-6f562d5af828] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1227.290181] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4f6678-8b96-47bc-a71c-a3984cbdd8b7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.294714] env[62507]: DEBUG oslo_concurrency.lockutils [None req-b3b91df7-7511-4f1b-b485-1bd575dbea67 tempest-AttachInterfacesUnderV243Test-38528730 tempest-AttachInterfacesUnderV243Test-38528730-project-member] Lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.195s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.295950] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 115.877s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.296163] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29] During sync_power_state the instance has a pending task (deleting). Skip. 
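[Annotation] The lock records throughout this log ("Acquiring lock ... by ...", "acquired ... :: waited Ns", "released ... :: held Ns", all attributed to inner in lockutils.py) are emitted by oslo.concurrency's synchronized wrapper, which times how long a caller waited for, and then held, a named semaphore. A minimal sketch that reproduces the same three DEBUG records; the lock name and function are illustrative only.

    import logging
    from oslo_concurrency import lockutils

    # DEBUG level is required to see the acquire/release records.
    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Runs only while "compute_resources" is held; the waited/held
        # durations in the log are measured around this body by the
        # "inner" wrapper in lockutils.py.
        pass

    abort_instance_claim()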
[ 1227.296340] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "2827c296-0e66-4ad3-a1b6-0ce5dbc6ff29" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.301474] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7263b446-5c8f-410b-b82c-3108edba0cc6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.305651] env[62507]: DEBUG oslo_concurrency.lockutils [None req-86683ef0-6915-4f80-8e2e-6f87664e8e9f tempest-ServersListShow296Test-1840877548 tempest-ServersListShow296Test-1840877548-project-member] Lock "ef130396-4736-4601-9024-6f562d5af828" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.476s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.332730] env[62507]: DEBUG nova.compute.manager [None req-edec3c59-0b0b-4727-998e-764e4c7fc22b tempest-ServersNegativeTestJSON-1851693462 tempest-ServersNegativeTestJSON-1851693462-project-member] [instance: 7b49dd64-781d-48c8-ac86-0c523b39f99a] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.335334] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3722c7f1-c63c-49c2-86a3-3fb8c8187acb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.342675] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-229d8c9f-b9d0-4a7d-a866-6521526bf2b0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.357641] env[62507]: DEBUG nova.compute.provider_tree [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1227.359026] env[62507]: DEBUG nova.compute.manager [None req-edec3c59-0b0b-4727-998e-764e4c7fc22b tempest-ServersNegativeTestJSON-1851693462 tempest-ServersNegativeTestJSON-1851693462-project-member] [instance: 7b49dd64-781d-48c8-ac86-0c523b39f99a] Instance disappeared before build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1227.366596] env[62507]: DEBUG nova.scheduler.client.report [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1227.380234] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.573s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.380818] env[62507]: ERROR nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. [ 1227.380818] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Traceback (most recent call last): [ 1227.380818] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1227.380818] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1227.380818] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1227.380818] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] result = getattr(controller, method)(*args, **kwargs) [ 1227.380818] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1227.380818] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self._get(image_id) [ 1227.380818] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1227.380818] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1227.380818] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1227.381232] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] resp, body = self.http_client.get(url, headers=header) [ 1227.381232] env[62507]: ERROR nova.compute.manager 
[instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1227.381232] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self.request(url, 'GET', **kwargs) [ 1227.381232] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1227.381232] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self._handle_response(resp) [ 1227.381232] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1227.381232] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise exc.from_response(resp, resp.content) [ 1227.381232] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1227.381232] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1227.381232] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] During handling of the above exception, another exception occurred: [ 1227.381232] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1227.381232] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Traceback (most recent call last): [ 1227.381615] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1227.381615] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self.driver.spawn(context, instance, image_meta, [ 1227.381615] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1227.381615] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1227.381615] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1227.381615] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._fetch_image_if_missing(context, vi) [ 1227.381615] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1227.381615] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] image_fetch(context, vi, tmp_image_ds_loc) [ 1227.381615] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1227.381615] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] 
images.fetch_image( [ 1227.381615] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1227.381615] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] metadata = IMAGE_API.get(context, image_ref) [ 1227.381615] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1227.382018] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return session.show(context, image_id, [ 1227.382018] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1227.382018] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] _reraise_translated_image_exception(image_id) [ 1227.382018] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1227.382018] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise new_exc.with_traceback(exc_trace) [ 1227.382018] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1227.382018] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1227.382018] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1227.382018] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] result = getattr(controller, method)(*args, **kwargs) [ 1227.382018] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1227.382018] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self._get(image_id) [ 1227.382018] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1227.382018] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1227.382431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1227.382431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] resp, body = self.http_client.get(url, headers=header) [ 1227.382431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1227.382431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self.request(url, 'GET', **kwargs) [ 1227.382431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1227.382431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self._handle_response(resp) [ 1227.382431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1227.382431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise exc.from_response(resp, resp.content) [ 1227.382431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] nova.exception.ImageNotAuthorized: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. [ 1227.382431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1227.382431] env[62507]: DEBUG nova.compute.utils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1227.383326] env[62507]: DEBUG oslo_concurrency.lockutils [None req-edec3c59-0b0b-4727-998e-764e4c7fc22b tempest-ServersNegativeTestJSON-1851693462 tempest-ServersNegativeTestJSON-1851693462-project-member] Lock "7b49dd64-781d-48c8-ac86-0c523b39f99a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.776s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.384671] env[62507]: DEBUG nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Build of instance 4a3639c7-8795-4702-a729-8239b0d55d51 was re-scheduled: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1227.385136] env[62507]: DEBUG nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1227.385317] env[62507]: DEBUG nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1227.385473] env[62507]: DEBUG nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1227.385673] env[62507]: DEBUG nova.network.neutron [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1227.393085] env[62507]: DEBUG nova.compute.manager [None req-e71dd7a0-8f9c-4644-8ab8-3e90d84cda88 tempest-ServerAddressesTestJSON-899176983 tempest-ServerAddressesTestJSON-899176983-project-member] [instance: f70eaaec-66d0-4ec0-b947-3eaa9d6038ca] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.419584] env[62507]: DEBUG nova.compute.manager [None req-e71dd7a0-8f9c-4644-8ab8-3e90d84cda88 tempest-ServerAddressesTestJSON-899176983 tempest-ServerAddressesTestJSON-899176983-project-member] [instance: f70eaaec-66d0-4ec0-b947-3eaa9d6038ca] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1227.440149] env[62507]: DEBUG oslo_concurrency.lockutils [None req-e71dd7a0-8f9c-4644-8ab8-3e90d84cda88 tempest-ServerAddressesTestJSON-899176983 tempest-ServerAddressesTestJSON-899176983-project-member] Lock "f70eaaec-66d0-4ec0-b947-3eaa9d6038ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.619s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.450110] env[62507]: DEBUG nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.502019] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.502019] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.503133] env[62507]: INFO nova.compute.claims [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1227.540336] env[62507]: DEBUG neutronclient.v2_0.client [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62507) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1227.541830] env[62507]: ERROR nova.compute.manager [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
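[Annotation] Both 401 failures around this point (the ImageNotAuthorized build failure above and the deallocate-networks traceback that follows) show the same translation pattern: the raw glanceclient HTTPUnauthorized is re-raised as a Nova-level exception with the original traceback attached, which is why the log shows chained "During handling of the above exception" blocks. A hedged sketch of that pattern, not Nova's actual code; ImageNotAuthorized below is a stand-in for nova.exception.ImageNotAuthorized, and client is assumed to be an authenticated glanceclient.v2.client.Client.

    import glanceclient.exc

    class ImageNotAuthorized(Exception):
        """Stand-in for nova.exception.ImageNotAuthorized."""

    def show_image(client, image_id):
        try:
            return client.images.get(image_id)
        except glanceclient.exc.HTTPUnauthorized as exc:
            # Mirror the _reraise_translated_image_exception idea: raise the
            # translated exception carrying the original traceback. Raising
            # inside the except block also sets __context__, producing the
            # chained traceback blocks seen in this log.
            raise ImageNotAuthorized(
                'Not authorized for image %s.' % image_id).with_traceback(
                    exc.__traceback__)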
[ 1227.541830] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Traceback (most recent call last): [ 1227.541830] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1227.541830] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1227.541830] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1227.541830] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] result = getattr(controller, method)(*args, **kwargs) [ 1227.541830] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1227.541830] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self._get(image_id) [ 1227.541830] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1227.541830] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1227.541830] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1227.542228] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] resp, body = self.http_client.get(url, headers=header) [ 1227.542228] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1227.542228] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self.request(url, 'GET', **kwargs) [ 1227.542228] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1227.542228] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self._handle_response(resp) [ 1227.542228] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1227.542228] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise exc.from_response(resp, resp.content) [ 1227.542228] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1227.542228] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1227.542228] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] During handling of the above exception, another exception occurred: [ 1227.542228] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1227.542228] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Traceback (most recent call last): [ 1227.542617] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1227.542617] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self.driver.spawn(context, instance, image_meta, [ 1227.542617] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1227.542617] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1227.542617] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1227.542617] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._fetch_image_if_missing(context, vi) [ 1227.542617] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1227.542617] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] image_fetch(context, vi, tmp_image_ds_loc) [ 1227.542617] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1227.542617] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] images.fetch_image( [ 1227.542617] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1227.542617] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] metadata = IMAGE_API.get(context, image_ref) [ 1227.542617] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1227.543056] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return session.show(context, image_id, [ 1227.543056] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1227.543056] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] _reraise_translated_image_exception(image_id) [ 1227.543056] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1227.543056] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise new_exc.with_traceback(exc_trace) [ 1227.543056] env[62507]: ERROR nova.compute.manager [instance: 
4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1227.543056] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1227.543056] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1227.543056] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] result = getattr(controller, method)(*args, **kwargs) [ 1227.543056] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1227.543056] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self._get(image_id) [ 1227.543056] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1227.543056] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1227.543470] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1227.543470] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] resp, body = self.http_client.get(url, headers=header) [ 1227.543470] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1227.543470] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self.request(url, 'GET', **kwargs) [ 1227.543470] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1227.543470] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self._handle_response(resp) [ 1227.543470] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1227.543470] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise exc.from_response(resp, resp.content) [ 1227.543470] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] nova.exception.ImageNotAuthorized: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. 
[ 1227.543470] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1227.543470] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] During handling of the above exception, another exception occurred: [ 1227.543470] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1227.543470] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Traceback (most recent call last): [ 1227.543880] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1227.543880] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._build_and_run_instance(context, instance, image, [ 1227.543880] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1227.543880] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise exception.RescheduledException( [ 1227.543880] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] nova.exception.RescheduledException: Build of instance 4a3639c7-8795-4702-a729-8239b0d55d51 was re-scheduled: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. [ 1227.543880] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1227.543880] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] During handling of the above exception, another exception occurred: [ 1227.543880] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1227.543880] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Traceback (most recent call last): [ 1227.543880] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1227.543880] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] ret = obj(*args, **kwargs) [ 1227.543880] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1227.543880] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] exception_handler_v20(status_code, error_body) [ 1227.544316] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1227.544316] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise client_exc(message=error_message, [ 1227.544316] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1227.544316] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Neutron server returns request_ids: ['req-e3b30400-a648-4dc1-ae89-f836f75ad2ec'] [ 1227.544316] env[62507]: ERROR nova.compute.manager [instance: 
4a3639c7-8795-4702-a729-8239b0d55d51] [ 1227.544316] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] During handling of the above exception, another exception occurred: [ 1227.544316] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1227.544316] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Traceback (most recent call last): [ 1227.544316] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1227.544316] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._deallocate_network(context, instance, requested_networks) [ 1227.544316] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1227.544316] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self.network_api.deallocate_for_instance( [ 1227.544316] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1227.544745] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] data = neutron.list_ports(**search_opts) [ 1227.544745] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1227.544745] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] ret = obj(*args, **kwargs) [ 1227.544745] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1227.544745] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self.list('ports', self.ports_path, retrieve_all, [ 1227.544745] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1227.544745] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] ret = obj(*args, **kwargs) [ 1227.544745] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1227.544745] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] for r in self._pagination(collection, path, **params): [ 1227.544745] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1227.544745] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] res = self.get(path, params=params) [ 1227.544745] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1227.544745] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] ret = obj(*args, **kwargs) [ 1227.545602] env[62507]: ERROR nova.compute.manager [instance: 
4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1227.545602] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self.retry_request("GET", action, body=body, [ 1227.545602] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1227.545602] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] ret = obj(*args, **kwargs) [ 1227.545602] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1227.545602] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self.do_request(method, action, body=body, [ 1227.545602] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1227.545602] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] ret = obj(*args, **kwargs) [ 1227.545602] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1227.545602] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._handle_fault_response(status_code, replybody, resp) [ 1227.545602] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1227.545602] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise exception.Unauthorized() [ 1227.545602] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] nova.exception.Unauthorized: Not authorized. 
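The three chained tracebacks above are ordinary Python exception chaining at work: the build fails with RescheduledException ("Not authorized for image ..."), the cleanup path then hits neutronclient's Unauthorized, and the client wrapper in nova/network/neutron.py (line 196 calls through, line 204 re-raises) converts it into a nova-level exception.Unauthorized. A minimal, stdlib-only sketch of that translation pattern; ClientUnauthorized and ServiceUnauthorized are stand-in names, not the real neutronclient/nova classes:

    import functools

    class ClientUnauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized."""

    class ServiceUnauthorized(Exception):
        """Stand-in for nova.exception.Unauthorized."""

    def translate_client_errors(func):
        # Call through to the client; re-raise auth failures at the
        # service level. Raising inside the except block produces the
        # implicit chain ("During handling of the above exception,
        # another exception occurred:") seen in the log above.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except ClientUnauthorized:
                raise ServiceUnauthorized("Not authorized.")
        return wrapper

    @translate_client_errors
    def list_ports(**search_opts):
        # Simulate the Neutron API rejecting an invalid/expired token.
        raise ClientUnauthorized(
            "401: The request you have made requires authentication.")

    if __name__ == "__main__":
        try:
            list_ports(device_id="4a3639c7-8795-4702-a729-8239b0d55d51")
        except ServiceUnauthorized as exc:
            print(f"{exc} (context: {exc.__context__})")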
[ 1227.546139] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1227.593134] env[62507]: INFO nova.scheduler.client.report [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Deleted allocations for instance 4a3639c7-8795-4702-a729-8239b0d55d51 [ 1227.612103] env[62507]: DEBUG oslo_concurrency.lockutils [None req-567590d8-2fad-4503-bdd3-0ccc0bde8c1a tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Lock "4a3639c7-8795-4702-a729-8239b0d55d51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 637.726s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.613219] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Lock "4a3639c7-8795-4702-a729-8239b0d55d51" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 439.919s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.613445] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquiring lock "4a3639c7-8795-4702-a729-8239b0d55d51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.613645] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Lock "4a3639c7-8795-4702-a729-8239b0d55d51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.613854] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Lock "4a3639c7-8795-4702-a729-8239b0d55d51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.616385] env[62507]: INFO nova.compute.manager [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Terminating instance [ 1227.617943] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquiring lock "refresh_cache-4a3639c7-8795-4702-a729-8239b0d55d51" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1227.618111] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Acquired lock 
"refresh_cache-4a3639c7-8795-4702-a729-8239b0d55d51" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.618283] env[62507]: DEBUG nova.network.neutron [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1227.627210] env[62507]: DEBUG nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.675596] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.865839] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5b5920-d693-45be-8f2c-b6c9a064191f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.873438] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fbfbbf4-f905-485c-b7a2-c0428a5b1777 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.903199] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d54c41b-0dd7-408c-a591-6aab0e1ab00c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.910052] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271b0156-086d-49fb-aa55-40cc9ca4c3d0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.924896] env[62507]: DEBUG nova.compute.provider_tree [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1227.934661] env[62507]: DEBUG nova.scheduler.client.report [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1227.950441] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.449s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.950926] env[62507]: DEBUG nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1227.957025] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.278s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.957025] env[62507]: INFO nova.compute.claims [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1227.988357] env[62507]: DEBUG nova.compute.utils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1227.992893] env[62507]: DEBUG nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1227.992893] env[62507]: DEBUG nova.network.neutron [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1227.997364] env[62507]: DEBUG nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1228.074501] env[62507]: DEBUG nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1228.096138] env[62507]: DEBUG nova.policy [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68528d00c0c54da18f2dbfa8e3e2b43d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '732ce554ebe1438d961e8d1199beecbd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1228.106133] env[62507]: DEBUG nova.virt.hardware [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1228.106133] env[62507]: DEBUG nova.virt.hardware [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1228.106133] env[62507]: DEBUG nova.virt.hardware [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1228.106311] env[62507]: DEBUG nova.virt.hardware [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1228.106311] env[62507]: DEBUG nova.virt.hardware [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1228.106429] env[62507]: DEBUG nova.virt.hardware [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1228.106873] env[62507]: DEBUG nova.virt.hardware [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1228.106969] env[62507]: DEBUG nova.virt.hardware [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1228.107201] env[62507]: DEBUG nova.virt.hardware [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1228.107400] env[62507]: DEBUG nova.virt.hardware [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1228.107676] env[62507]: DEBUG nova.virt.hardware [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1228.108995] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c85749-8e26-4d33-9d8c-4e93f339b740 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.121480] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faefb01e-c2e8-411c-9f78-d05c486e332a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.165867] env[62507]: DEBUG nova.network.neutron [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Updating instance_info_cache with network_info: [{"id": "0daa011f-53db-4cf1-b496-c6d7b6f44701", "address": "fa:16:3e:87:0b:59", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.208", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": 
"nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0daa011f-53", "ovs_interfaceid": "0daa011f-53db-4cf1-b496-c6d7b6f44701", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.167178] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.167329] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1228.167452] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1228.178462] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Releasing lock "refresh_cache-4a3639c7-8795-4702-a729-8239b0d55d51" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1228.178845] env[62507]: DEBUG nova.compute.manager [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1228.179050] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1228.179572] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2dbb6d9-44fa-455b-b011-45df74d839a7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.190639] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1228.190639] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1228.190774] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1228.191093] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1228.191093] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1228.191260] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1228.191260] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1228.191393] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1228.191561] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1228.191655] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1228.191776] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1228.194910] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567b2549-eeff-4c4b-a3b4-6cdfd6e5612e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.210589] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.227822] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4a3639c7-8795-4702-a729-8239b0d55d51 could not be found. 
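The nova.virt.hardware records above show the CPU topology search for the m1.nano flavor: with no flavor or image constraints, 1 vCPU under limits of 65536 sockets/cores/threads yields exactly one candidate, VirtCPUTopology(cores=1,sockets=1,threads=1). A loose stdlib reimplementation of the enumeration idea (not Nova's exact rules; possible_topologies and Topology are illustrative names):

    import itertools
    from dataclasses import dataclass

    @dataclass(frozen=True)
    class Topology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # Enumerate every (sockets, cores, threads) split whose product
        # equals the vCPU count, within the per-dimension limits -- the
        # shape behind "Build topologies for 1 vcpu(s) 1:1:1 ... Got 1
        # possible topologies" in the records above.
        return [Topology(s, c, t)
                for s, c, t in itertools.product(range(1, max_sockets + 1),
                                                 range(1, max_cores + 1),
                                                 range(1, max_threads + 1))
                if s * c * t == vcpus]

    if __name__ == "__main__":
        # -> [Topology(sockets=1, cores=1, threads=1)]
        print(possible_topologies(1, 8, 8, 2))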
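Interleaved with that build, the _heal_instance_info_cache periodic task rebuilds its candidate list and skips every instance still in the Building state, ending with "Didn't find any instances for network info cache update." A rough sketch of that selection step; Instance and BUILDING are illustrative stand-ins for Nova's objects and vm_states constants:

    from dataclasses import dataclass

    BUILDING = "building"  # stand-in for nova.compute.vm_states.BUILDING

    @dataclass
    class Instance:
        uuid: str
        vm_state: str

    def instances_to_heal(instances):
        # Yield only instances whose network info cache is safe to
        # refresh; instances mid-build are skipped, matching the
        # "Skipping network cache update ... it is Building" records.
        for inst in instances:
            if inst.vm_state == BUILDING:
                print(f"[instance: {inst.uuid}] skipped: still Building")
                continue
            yield inst

    if __name__ == "__main__":
        pool = [Instance("e682e67f-5a36-4851-b870-7099d7db119d", BUILDING),
                Instance("8e22d586-0ab8-4968-b0d1-2ef1cd8c0249", BUILDING)]
        if not list(instances_to_heal(pool)):
            print("Didn't find any instances for network info cache update.")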
[ 1228.228067] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1228.228260] env[62507]: INFO nova.compute.manager [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1228.228526] env[62507]: DEBUG oslo.service.loopingcall [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1228.232045] env[62507]: DEBUG nova.compute.manager [-] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1228.232045] env[62507]: DEBUG nova.network.neutron [-] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1228.351051] env[62507]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62507) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1228.351394] env[62507]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1228.351863] env[62507]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-e8b6aa51-dde6-49f8-a7c8-480c8218b1cf'] [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1228.351863] env[62507]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1228.352457] env[62507]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1228.352457] env[62507]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1228.353058] env[62507]: ERROR oslo.service.loopingcall [ 1228.353568] env[62507]: ERROR nova.compute.manager [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
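The loopingcall failure above is the retry wrapper around network deallocation (the RetryDecorator/_func frames) giving up: a 401 from an invalid service token fails identically on every attempt, so it is treated as a credential configuration problem rather than a transient fault and propagates out. A simplified stdlib sketch of that retry shape; retry_call and both exception names are illustrative, not the oslo.service API:

    import time

    class TransientError(Exception):
        """Worth retrying, e.g. a momentary connectivity blip."""

    class CredentialConfigError(Exception):
        """Not worth retrying: deterministic until config is fixed."""

    def retry_call(func, attempts=3, delay=0.1):
        # Retry only transient faults; anything else escapes on the
        # first attempt, like the NeutronAdminCredentialConfiguration-
        # Invalid raised out of the looping call above.
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except TransientError:
                if attempt == attempts:
                    raise
                time.sleep(delay)

    def deallocate_network():
        raise CredentialConfigError(
            "Networking client is experiencing an unauthorized exception.")

    if __name__ == "__main__":
        try:
            retry_call(deallocate_network)
        except CredentialConfigError as exc:
            print(f"Failed to deallocate network for instance. Error: {exc}")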
[ 1228.371673] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9956ef-3526-415b-b264-7b3b3a1b4fc2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.379488] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58c189c-0194-41c4-b0a5-9d5a2455aaeb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.412065] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fbc1b13-2e4d-4749-ba72-a406c75fb02e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.416062] env[62507]: ERROR nova.compute.manager [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1228.416062] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Traceback (most recent call last): [ 1228.416062] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.416062] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] ret = obj(*args, **kwargs) [ 1228.416062] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1228.416062] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] exception_handler_v20(status_code, error_body) [ 1228.416062] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1228.416062] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise client_exc(message=error_message, [ 1228.416062] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1228.416062] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Neutron server returns request_ids: ['req-e8b6aa51-dde6-49f8-a7c8-480c8218b1cf'] [ 1228.416062] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1228.416527] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] During handling of the above exception, another exception occurred: [ 1228.416527] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1228.416527] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Traceback (most recent call last): [ 1228.416527] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in 
do_terminate_instance [ 1228.416527] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._delete_instance(context, instance, bdms) [ 1228.416527] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1228.416527] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._shutdown_instance(context, instance, bdms) [ 1228.416527] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1228.416527] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._try_deallocate_network(context, instance, requested_networks) [ 1228.416527] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1228.416527] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] with excutils.save_and_reraise_exception(): [ 1228.416527] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1228.416527] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self.force_reraise() [ 1228.417030] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1228.417030] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise self.value [ 1228.417030] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1228.417030] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] _deallocate_network_with_retries() [ 1228.417030] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1228.417030] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return evt.wait() [ 1228.417030] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1228.417030] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] result = hub.switch() [ 1228.417030] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1228.417030] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self.greenlet.switch() [ 1228.417030] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1228.417030] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] result = func(*self.args, **self.kw) [ 1228.417431] env[62507]: ERROR 
nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1228.417431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] result = f(*args, **kwargs) [ 1228.417431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1228.417431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._deallocate_network( [ 1228.417431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1228.417431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self.network_api.deallocate_for_instance( [ 1228.417431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1228.417431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] data = neutron.list_ports(**search_opts) [ 1228.417431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.417431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] ret = obj(*args, **kwargs) [ 1228.417431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1228.417431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self.list('ports', self.ports_path, retrieve_all, [ 1228.417431] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.417902] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] ret = obj(*args, **kwargs) [ 1228.417902] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1228.417902] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] for r in self._pagination(collection, path, **params): [ 1228.417902] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1228.417902] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] res = self.get(path, params=params) [ 1228.417902] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.417902] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] ret = obj(*args, **kwargs) [ 1228.417902] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1228.417902] env[62507]: ERROR 
nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self.retry_request("GET", action, body=body, [ 1228.417902] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.417902] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] ret = obj(*args, **kwargs) [ 1228.417902] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1228.417902] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] return self.do_request(method, action, body=body, [ 1228.418337] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.418337] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] ret = obj(*args, **kwargs) [ 1228.418337] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1228.418337] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] self._handle_fault_response(status_code, replybody, resp) [ 1228.418337] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1228.418337] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1228.418337] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
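The repeated force_reraise()/raise self.value frames in the traceback above are oslo.utils' save_and_reraise_exception context manager: _try_deallocate_network gets to run its error handling (ultimately setting vm_state to ERROR) while the original exception still propagates unchanged. A rough stdlib equivalent of the pattern; save_and_reraise here is a simplified stand-in, not oslo_utils.excutils itself:

    import contextlib

    @contextlib.contextmanager
    def save_and_reraise(on_error):
        # Run the body; on failure, perform side effects (logging,
        # state updates) and then re-raise the original exception so
        # every caller up the stack still sees the real failure.
        try:
            yield
        except Exception:
            on_error()
            raise

    def mark_instance_error():
        print("Setting instance vm_state to ERROR")

    if __name__ == "__main__":
        try:
            with save_and_reraise(on_error=mark_instance_error):
                raise RuntimeError(
                    "Networking client is experiencing an unauthorized exception.")
        except RuntimeError as exc:
            print(f"re-raised unchanged: {exc}")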
[ 1228.418337] env[62507]: ERROR nova.compute.manager [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] [ 1228.423622] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6631dcf3-e78f-4ec1-aa9d-fec7ea29402f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.437471] env[62507]: DEBUG nova.compute.provider_tree [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.450147] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Lock "4a3639c7-8795-4702-a729-8239b0d55d51" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.836s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.450731] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "4a3639c7-8795-4702-a729-8239b0d55d51" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 117.032s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.450925] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] During sync_power_state the instance has a pending task (deleting). Skip. 
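Note the lock sequencing above: the moment terminate_instance releases the instance lock (held 0.836s), the _sync_power_states periodic task acquires it (after waiting 117.032s) and then deliberately does nothing, because the instance still has a pending task (deleting); reconciling power state mid-delete would race the teardown. A small sketch of that guard; the names are illustrative, not Nova's:

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class Instance:
        uuid: str
        task_state: Optional[str]  # e.g. "deleting" while a delete runs

    def query_driver_power_state_and_sync(instance):
        # Skip instances with an operation in progress, matching
        # "During sync_power_state the instance has a pending task
        # (deleting). Skip." in the log above.
        if instance.task_state is not None:
            print(f"[instance: {instance.uuid}] pending task "
                  f"({instance.task_state}). Skip.")
            return
        # Otherwise: compare hypervisor power state with the DB record
        # and reconcile (omitted in this sketch).

    if __name__ == "__main__":
        query_driver_power_state_and_sync(
            Instance("4a3639c7-8795-4702-a729-8239b0d55d51", "deleting"))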
[ 1228.451114] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "4a3639c7-8795-4702-a729-8239b0d55d51" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.456828] env[62507]: DEBUG nova.scheduler.client.report [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1228.473841] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.520s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.474361] env[62507]: DEBUG nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1228.477971] env[62507]: DEBUG nova.network.neutron [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Successfully created port: 79e408ad-9417-4bcd-9807-2736e0943a4e {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1228.514409] env[62507]: INFO nova.compute.manager [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] [instance: 4a3639c7-8795-4702-a729-8239b0d55d51] Successfully reverted task state from None on failure for instance. [ 1228.521085] env[62507]: DEBUG nova.compute.utils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server [None req-f81c28c2-11c6-4f80-97cc-be28d5c98ab8 tempest-TenantUsagesTestJSON-234509088 tempest-TenantUsagesTestJSON-234509088-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-e8b6aa51-dde6-49f8-a7c8-480c8218b1cf'] [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1228.521436] env[62507]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server raise self.value [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1228.522087] env[62507]: ERROR oslo_messaging.rpc.server raise self.value [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server raise self.value [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1228.522803] env[62507]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server raise self.value [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server raise self.value [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1228.523520] env[62507]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1228.524239] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.524239] env[62507]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1228.524821] env[62507]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1228.525510] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1228.525510] env[62507]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1228.525510] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1228.525510] env[62507]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1228.525510] env[62507]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1228.525510] env[62507]: ERROR oslo_messaging.rpc.server [ 1228.525510] env[62507]: DEBUG nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Allocating IP information in the background. 
{{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1228.525510] env[62507]: DEBUG nova.network.neutron [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1228.539976] env[62507]: DEBUG nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1228.617743] env[62507]: DEBUG nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1228.621453] env[62507]: DEBUG nova.policy [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8b8e0669ec4494fb0305760e24521ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53a7bfe145d144548ae019ad65d21ce9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1228.651360] env[62507]: DEBUG nova.virt.hardware [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=<?>,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-02-12T01:14:38Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1228.651858] env[62507]: DEBUG nova.virt.hardware [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1228.651858] env[62507]: DEBUG nova.virt.hardware [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 
tempest-ListServerFiltersTestJSON-1253904850-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1228.651996] env[62507]: DEBUG nova.virt.hardware [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1228.652102] env[62507]: DEBUG nova.virt.hardware [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1228.652259] env[62507]: DEBUG nova.virt.hardware [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1228.652462] env[62507]: DEBUG nova.virt.hardware [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1228.652645] env[62507]: DEBUG nova.virt.hardware [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1228.652840] env[62507]: DEBUG nova.virt.hardware [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1228.653019] env[62507]: DEBUG nova.virt.hardware [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1228.653831] env[62507]: DEBUG nova.virt.hardware [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1228.655426] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e4130d-60f4-41ed-be6b-ca1133250226 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.665349] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b28689-08b4-4d78-8250-4b92109df0e3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.078723] env[62507]: DEBUG 
nova.network.neutron [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Successfully created port: 3027b452-f26b-4fa6-8199-cbf5dfc92fa8 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1229.167279] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1229.167495] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1229.274327] env[62507]: DEBUG nova.network.neutron [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Successfully updated port: 79e408ad-9417-4bcd-9807-2736e0943a4e {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1229.288892] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Acquiring lock "refresh_cache-d888bcb9-89ef-41aa-b637-e2a15efd0ce8" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1229.289130] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Acquired lock "refresh_cache-d888bcb9-89ef-41aa-b637-e2a15efd0ce8" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.289242] env[62507]: DEBUG nova.network.neutron [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1229.339667] env[62507]: DEBUG nova.compute.manager [req-026af14c-5639-40fa-8cb9-139b89779a1d req-d7acd304-b26e-4c2a-841a-9d79e7b45a81 service nova] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Received event network-vif-plugged-79e408ad-9417-4bcd-9807-2736e0943a4e {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1229.339667] env[62507]: DEBUG oslo_concurrency.lockutils [req-026af14c-5639-40fa-8cb9-139b89779a1d req-d7acd304-b26e-4c2a-841a-9d79e7b45a81 service nova] Acquiring lock "d888bcb9-89ef-41aa-b637-e2a15efd0ce8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.339667] env[62507]: DEBUG oslo_concurrency.lockutils [req-026af14c-5639-40fa-8cb9-139b89779a1d req-d7acd304-b26e-4c2a-841a-9d79e7b45a81 service nova] Lock "d888bcb9-89ef-41aa-b637-e2a15efd0ce8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s 
{{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.339667] env[62507]: DEBUG oslo_concurrency.lockutils [req-026af14c-5639-40fa-8cb9-139b89779a1d req-d7acd304-b26e-4c2a-841a-9d79e7b45a81 service nova] Lock "d888bcb9-89ef-41aa-b637-e2a15efd0ce8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.340116] env[62507]: DEBUG nova.compute.manager [req-026af14c-5639-40fa-8cb9-139b89779a1d req-d7acd304-b26e-4c2a-841a-9d79e7b45a81 service nova] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] No waiting events found dispatching network-vif-plugged-79e408ad-9417-4bcd-9807-2736e0943a4e {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1229.340116] env[62507]: WARNING nova.compute.manager [req-026af14c-5639-40fa-8cb9-139b89779a1d req-d7acd304-b26e-4c2a-841a-9d79e7b45a81 service nova] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Received unexpected event network-vif-plugged-79e408ad-9417-4bcd-9807-2736e0943a4e for instance with vm_state building and task_state spawning. [ 1229.358699] env[62507]: DEBUG nova.network.neutron [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1229.568161] env[62507]: DEBUG nova.network.neutron [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Updating instance_info_cache with network_info: [{"id": "79e408ad-9417-4bcd-9807-2736e0943a4e", "address": "fa:16:3e:c2:60:36", "network": {"id": "05fcfc7d-24af-4a03-ba8e-43e9739ba26e", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1347992293-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "732ce554ebe1438d961e8d1199beecbd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79e408ad-94", "ovs_interfaceid": "79e408ad-9417-4bcd-9807-2736e0943a4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.579917] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Releasing lock "refresh_cache-d888bcb9-89ef-41aa-b637-e2a15efd0ce8" {{(pid=62507) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1229.580219] env[62507]: DEBUG nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Instance network_info: |[{"id": "79e408ad-9417-4bcd-9807-2736e0943a4e", "address": "fa:16:3e:c2:60:36", "network": {"id": "05fcfc7d-24af-4a03-ba8e-43e9739ba26e", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1347992293-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "732ce554ebe1438d961e8d1199beecbd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79e408ad-94", "ovs_interfaceid": "79e408ad-9417-4bcd-9807-2736e0943a4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1229.580714] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:60:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11b669be-fb26-4ef8-bdb6-c77ab9d06daf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79e408ad-9417-4bcd-9807-2736e0943a4e', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1229.588014] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Creating folder: Project (732ce554ebe1438d961e8d1199beecbd). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1229.588530] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4388317e-5753-429f-ae12-d2286c9186a9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.599603] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Created folder: Project (732ce554ebe1438d961e8d1199beecbd) in parent group-v497991. [ 1229.599790] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Creating folder: Instances. Parent ref: group-v498067. 
{{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1229.600013] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37ec3439-d35b-4bc0-b287-20ea9c8eadf7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.609111] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Created folder: Instances in parent group-v498067. [ 1229.609339] env[62507]: DEBUG oslo.service.loopingcall [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1229.609517] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1229.609705] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ea50656-d249-4111-95bd-0d7888f22af7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.630552] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1229.630552] env[62507]: value = "task-2460037" [ 1229.630552] env[62507]: _type = "Task" [ 1229.630552] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.637724] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460037, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.674720] env[62507]: DEBUG nova.compute.manager [req-54ef7056-5944-46f5-a02d-7ac3281aceaa req-d2c09fc6-b4f5-49b7-a2d1-4162cec9730a service nova] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Received event network-vif-plugged-3027b452-f26b-4fa6-8199-cbf5dfc92fa8 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1229.674989] env[62507]: DEBUG oslo_concurrency.lockutils [req-54ef7056-5944-46f5-a02d-7ac3281aceaa req-d2c09fc6-b4f5-49b7-a2d1-4162cec9730a service nova] Acquiring lock "8e22d586-0ab8-4968-b0d1-2ef1cd8c0249-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.675290] env[62507]: DEBUG oslo_concurrency.lockutils [req-54ef7056-5944-46f5-a02d-7ac3281aceaa req-d2c09fc6-b4f5-49b7-a2d1-4162cec9730a service nova] Lock "8e22d586-0ab8-4968-b0d1-2ef1cd8c0249-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.675501] env[62507]: DEBUG oslo_concurrency.lockutils [req-54ef7056-5944-46f5-a02d-7ac3281aceaa req-d2c09fc6-b4f5-49b7-a2d1-4162cec9730a service nova] Lock "8e22d586-0ab8-4968-b0d1-2ef1cd8c0249-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.675678] env[62507]: DEBUG nova.compute.manager [req-54ef7056-5944-46f5-a02d-7ac3281aceaa req-d2c09fc6-b4f5-49b7-a2d1-4162cec9730a service nova] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] No waiting events found dispatching network-vif-plugged-3027b452-f26b-4fa6-8199-cbf5dfc92fa8 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1229.675849] env[62507]: WARNING nova.compute.manager [req-54ef7056-5944-46f5-a02d-7ac3281aceaa req-d2c09fc6-b4f5-49b7-a2d1-4162cec9730a service nova] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Received unexpected event network-vif-plugged-3027b452-f26b-4fa6-8199-cbf5dfc92fa8 for instance with vm_state building and task_state spawning. 
[ 1229.758756] env[62507]: DEBUG nova.network.neutron [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Successfully updated port: 3027b452-f26b-4fa6-8199-cbf5dfc92fa8 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1229.767053] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "refresh_cache-8e22d586-0ab8-4968-b0d1-2ef1cd8c0249" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1229.767225] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquired lock "refresh_cache-8e22d586-0ab8-4968-b0d1-2ef1cd8c0249" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.767423] env[62507]: DEBUG nova.network.neutron [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1229.832046] env[62507]: DEBUG nova.network.neutron [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1230.037504] env[62507]: DEBUG nova.network.neutron [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Updating instance_info_cache with network_info: [{"id": "3027b452-f26b-4fa6-8199-cbf5dfc92fa8", "address": "fa:16:3e:a9:20:d2", "network": {"id": "317818e4-3465-4932-901e-96246fc172b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1405826547-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53a7bfe145d144548ae019ad65d21ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3027b452-f2", "ovs_interfaceid": "3027b452-f26b-4fa6-8199-cbf5dfc92fa8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.049454] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Releasing lock "refresh_cache-8e22d586-0ab8-4968-b0d1-2ef1cd8c0249" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1230.049734] env[62507]: DEBUG nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Instance network_info: |[{"id": "3027b452-f26b-4fa6-8199-cbf5dfc92fa8", "address": "fa:16:3e:a9:20:d2", "network": {"id": "317818e4-3465-4932-901e-96246fc172b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1405826547-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53a7bfe145d144548ae019ad65d21ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3027b452-f2", "ovs_interfaceid": "3027b452-f26b-4fa6-8199-cbf5dfc92fa8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1230.050139] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:20:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f54f7284-8f7d-47ee-839d-2143062cfe44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3027b452-f26b-4fa6-8199-cbf5dfc92fa8', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1230.057725] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Creating folder: Project (53a7bfe145d144548ae019ad65d21ce9). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1230.058253] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-afa53076-9b60-46c8-93f6-4430e5dd72e0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.068716] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Created folder: Project (53a7bfe145d144548ae019ad65d21ce9) in parent group-v497991. [ 1230.068909] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Creating folder: Instances. Parent ref: group-v498070. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1230.069138] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c0e5c3d-fd88-47ee-aa5a-ee8e06a25bfb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.077425] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Created folder: Instances in parent group-v498070. [ 1230.077756] env[62507]: DEBUG oslo.service.loopingcall [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1230.077849] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1230.078072] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4832005d-8d3b-4397-b576-9a7936391918 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.096230] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1230.096230] env[62507]: value = "task-2460040" [ 1230.096230] env[62507]: _type = "Task" [ 1230.096230] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.103326] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460040, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.138702] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460037, 'name': CreateVM_Task, 'duration_secs': 0.349732} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.138859] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1230.139538] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1230.139700] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.140039] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1230.140258] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f52db55-d831-43dc-8cb3-5e98184f6569 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.144235] env[62507]: DEBUG oslo_vmware.api [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Waiting for the task: (returnval){ [ 1230.144235] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52abc06d-0efd-d193-98ce-918ea789a948" [ 1230.144235] env[62507]: _type = "Task" [ 
1230.144235] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.153142] env[62507]: DEBUG oslo_vmware.api [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52abc06d-0efd-d193-98ce-918ea789a948, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.167748] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.605973] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460040, 'name': CreateVM_Task, 'duration_secs': 0.292487} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.606196] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1230.606834] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1230.653541] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1230.653777] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1230.653988] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1230.654215] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.654518] env[62507]: DEBUG oslo_concurrency.lockutils [None 
req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1230.654759] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88ba4c44-7818-4553-bf38-4a6896f88319 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.659217] env[62507]: DEBUG oslo_vmware.api [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Waiting for the task: (returnval){ [ 1230.659217] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52205de2-21e7-9a7c-3fad-5972cba0e16e" [ 1230.659217] env[62507]: _type = "Task" [ 1230.659217] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.666192] env[62507]: DEBUG oslo_vmware.api [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52205de2-21e7-9a7c-3fad-5972cba0e16e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.169495] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.171029] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1231.171029] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1231.171224] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1231.180730] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.180926] env[62507]: 
DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.181109] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.181267] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1231.182336] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b939e7e8-9ea7-410e-801b-6e9523903559 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.189981] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92cc3e9f-a689-4e8c-9df9-b272cd037570 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.203861] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01bd36b-269c-4c03-8243-85d78bdde16e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.210106] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259b8836-93bd-49da-8f8f-2d09defc2a21 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.239063] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181174MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1231.239239] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.239439] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.320082] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e682e67f-5a36-4851-b870-7099d7db119d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.320258] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a9b1ef96-1409-4700-a1bb-4aec1691a0fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.320396] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01d865c8-ed85-45ec-aac6-bf923cd52dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.320547] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.320662] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.320813] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b53bed7e-5e76-4aa5-abe2-b05750497404 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.320937] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1fac8aa4-37a9-4f94-8050-b338cd2cd182 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.321064] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3627bbf7-507f-4345-b093-3b4f5bb45eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.321182] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d888bcb9-89ef-41aa-b637-e2a15efd0ce8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.321296] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1231.335962] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f4f51028-a313-4d17-bcf1-4decec2d3c3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.347932] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 0bc7792e-d291-46ef-9ac1-420959c38191 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.361467] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ffd1c74f-b08a-4f59-bc99-4a6910dbe1ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.364224] env[62507]: DEBUG nova.compute.manager [req-9c12fc19-c0e8-4f1d-8d79-b6601ecb40b6 req-7a6e4245-8ccf-49b1-b000-3931b1590c5c service nova] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Received event network-changed-79e408ad-9417-4bcd-9807-2736e0943a4e {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1231.364407] env[62507]: DEBUG nova.compute.manager [req-9c12fc19-c0e8-4f1d-8d79-b6601ecb40b6 req-7a6e4245-8ccf-49b1-b000-3931b1590c5c service nova] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Refreshing instance network info cache due to event network-changed-79e408ad-9417-4bcd-9807-2736e0943a4e. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1231.364618] env[62507]: DEBUG oslo_concurrency.lockutils [req-9c12fc19-c0e8-4f1d-8d79-b6601ecb40b6 req-7a6e4245-8ccf-49b1-b000-3931b1590c5c service nova] Acquiring lock "refresh_cache-d888bcb9-89ef-41aa-b637-e2a15efd0ce8" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1231.364767] env[62507]: DEBUG oslo_concurrency.lockutils [req-9c12fc19-c0e8-4f1d-8d79-b6601ecb40b6 req-7a6e4245-8ccf-49b1-b000-3931b1590c5c service nova] Acquired lock "refresh_cache-d888bcb9-89ef-41aa-b637-e2a15efd0ce8" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.364974] env[62507]: DEBUG nova.network.neutron [req-9c12fc19-c0e8-4f1d-8d79-b6601ecb40b6 req-7a6e4245-8ccf-49b1-b000-3931b1590c5c service nova] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Refreshing network info cache for port 79e408ad-9417-4bcd-9807-2736e0943a4e {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1231.373020] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 65efc608-6573-4690-8d11-2f0459647d70 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.383423] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e04f6623-4547-4095-a575-67eae0bbd289 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.394065] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 0600b5ad-334a-41e3-add2-4f8458040774 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.406225] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b35972e0-4a7b-4b2e-940e-2d9f40b0e55f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.418027] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef76c5ad-176d-44c0-891a-66333b42d0ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.428781] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a06c8cb4-ac81-44bf-bf6b-fa6c75fcb369 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.438767] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.448485] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance df12d1b1-3c2a-47f8-b8df-d9993acf8d82 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.458762] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7d0d04-1c97-40e1-824c-25d04f87e468 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.469064] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 955e2c90-e317-4148-887d-e9a4eacdda2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1231.469603] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1231.469603] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1231.684492] env[62507]: DEBUG nova.network.neutron [req-9c12fc19-c0e8-4f1d-8d79-b6601ecb40b6 req-7a6e4245-8ccf-49b1-b000-3931b1590c5c service nova] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Updated VIF entry in instance network info cache for port 79e408ad-9417-4bcd-9807-2736e0943a4e. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1231.684861] env[62507]: DEBUG nova.network.neutron [req-9c12fc19-c0e8-4f1d-8d79-b6601ecb40b6 req-7a6e4245-8ccf-49b1-b000-3931b1590c5c service nova] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Updating instance_info_cache with network_info: [{"id": "79e408ad-9417-4bcd-9807-2736e0943a4e", "address": "fa:16:3e:c2:60:36", "network": {"id": "05fcfc7d-24af-4a03-ba8e-43e9739ba26e", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1347992293-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "732ce554ebe1438d961e8d1199beecbd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79e408ad-94", "ovs_interfaceid": "79e408ad-9417-4bcd-9807-2736e0943a4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.694814] env[62507]: DEBUG oslo_concurrency.lockutils [req-9c12fc19-c0e8-4f1d-8d79-b6601ecb40b6 req-7a6e4245-8ccf-49b1-b000-3931b1590c5c service nova] Releasing lock "refresh_cache-d888bcb9-89ef-41aa-b637-e2a15efd0ce8" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1231.706942] env[62507]: DEBUG nova.compute.manager [req-96dec167-5e67-49b5-9dcd-e62ce4855bdf req-d911d5b0-a523-4628-9d42-14cf77ea692c service nova] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Received event network-changed-3027b452-f26b-4fa6-8199-cbf5dfc92fa8 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1231.707125] env[62507]: DEBUG nova.compute.manager [req-96dec167-5e67-49b5-9dcd-e62ce4855bdf req-d911d5b0-a523-4628-9d42-14cf77ea692c service nova] [instance: 
8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Refreshing instance network info cache due to event network-changed-3027b452-f26b-4fa6-8199-cbf5dfc92fa8. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1231.707319] env[62507]: DEBUG oslo_concurrency.lockutils [req-96dec167-5e67-49b5-9dcd-e62ce4855bdf req-d911d5b0-a523-4628-9d42-14cf77ea692c service nova] Acquiring lock "refresh_cache-8e22d586-0ab8-4968-b0d1-2ef1cd8c0249" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1231.707484] env[62507]: DEBUG oslo_concurrency.lockutils [req-96dec167-5e67-49b5-9dcd-e62ce4855bdf req-d911d5b0-a523-4628-9d42-14cf77ea692c service nova] Acquired lock "refresh_cache-8e22d586-0ab8-4968-b0d1-2ef1cd8c0249" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.707678] env[62507]: DEBUG nova.network.neutron [req-96dec167-5e67-49b5-9dcd-e62ce4855bdf req-d911d5b0-a523-4628-9d42-14cf77ea692c service nova] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Refreshing network info cache for port 3027b452-f26b-4fa6-8199-cbf5dfc92fa8 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1231.763628] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85c2d8a-10ed-43a3-a8e2-af1bbce3bcda {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.771164] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039076e5-ae4b-4029-9aaf-7a9a3aa98700 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.803822] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae294fe-3094-4ac0-a2ed-603bf53a57f5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.811051] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac610d1-e0d1-40fc-b399-0f73bd0bccdf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.823649] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1231.832494] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1231.846873] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1231.847067] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.608s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.012081] env[62507]: DEBUG nova.network.neutron [req-96dec167-5e67-49b5-9dcd-e62ce4855bdf req-d911d5b0-a523-4628-9d42-14cf77ea692c service nova] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Updated VIF entry in instance network info cache for port 3027b452-f26b-4fa6-8199-cbf5dfc92fa8. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1232.012457] env[62507]: DEBUG nova.network.neutron [req-96dec167-5e67-49b5-9dcd-e62ce4855bdf req-d911d5b0-a523-4628-9d42-14cf77ea692c service nova] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Updating instance_info_cache with network_info: [{"id": "3027b452-f26b-4fa6-8199-cbf5dfc92fa8", "address": "fa:16:3e:a9:20:d2", "network": {"id": "317818e4-3465-4932-901e-96246fc172b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1405826547-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53a7bfe145d144548ae019ad65d21ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3027b452-f2", "ovs_interfaceid": "3027b452-f26b-4fa6-8199-cbf5dfc92fa8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.022531] env[62507]: DEBUG oslo_concurrency.lockutils [req-96dec167-5e67-49b5-9dcd-e62ce4855bdf req-d911d5b0-a523-4628-9d42-14cf77ea692c service nova] Releasing lock "refresh_cache-8e22d586-0ab8-4968-b0d1-2ef1cd8c0249" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1242.084392] env[62507]: DEBUG oslo_concurrency.lockutils [None req-83d7f402-a600-4243-92c0-228ecb95fa53 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Acquiring lock "d888bcb9-89ef-41aa-b637-e2a15efd0ce8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.561205] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Acquiring lock "01043570-d72d-4a97-8c51-cfe30b25b82b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.561205] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Lock "01043570-d72d-4a97-8c51-cfe30b25b82b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.670099] env[62507]: WARNING oslo_vmware.rw_handles [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1275.670099] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1275.670099] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1275.670099] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1275.670099] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1275.670099] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1275.670099] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1275.670099] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1275.670099] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1275.670099] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1275.670099] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1275.670099] env[62507]: ERROR oslo_vmware.rw_handles [ 1275.670099] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/7ad9689b-faa9-4b25-87fe-ab9cae556f9f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1275.672024] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1275.672270] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Copying Virtual Disk [datastore2] vmware_temp/7ad9689b-faa9-4b25-87fe-ab9cae556f9f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/7ad9689b-faa9-4b25-87fe-ab9cae556f9f/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} 
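The WARNING/traceback above bottoms out in http.client.RemoteDisconnected, raised when rw_handles.py's close() asks for a response the ESX host never sends; the very next entry still reports the image file data as downloaded, so the handler treats the missing response as a warning rather than a failure. A minimal stdlib sketch of that close-time pattern, assuming a one-way upload where the response is optional (illustrative only, not oslo.vmware's actual implementation):

import http.client
import logging

LOG = logging.getLogger(__name__)

def close_write_handle(conn: http.client.HTTPConnection) -> None:
    """Close a connection used for a one-way file upload to a datastore."""
    try:
        # The server may drop the link without answering once the body
        # has been written; that does not mean the upload itself failed.
        conn.getresponse()
    except http.client.RemoteDisconnected:
        LOG.warning("Error occurred while reading the HTTP response.",
                    exc_info=True)
    finally:
        conn.close()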
[ 1275.672585] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0eb149e8-69cc-46d6-8422-25477fe5c04e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.681089] env[62507]: DEBUG oslo_vmware.api [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Waiting for the task: (returnval){ [ 1275.681089] env[62507]: value = "task-2460041" [ 1275.681089] env[62507]: _type = "Task" [ 1275.681089] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.688660] env[62507]: DEBUG oslo_vmware.api [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Task: {'id': task-2460041, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.191803] env[62507]: DEBUG oslo_vmware.exceptions [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1276.192314] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.193031] env[62507]: ERROR nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1276.193031] env[62507]: Faults: ['InvalidArgument'] [ 1276.193031] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] Traceback (most recent call last): [ 1276.193031] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1276.193031] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] yield resources [ 1276.193031] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1276.193031] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] self.driver.spawn(context, instance, image_meta, [ 1276.193031] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1276.193031] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1276.193031] env[62507]: ERROR nova.compute.manager [instance: 
e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1276.193031] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] self._fetch_image_if_missing(context, vi) [ 1276.193031] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1276.194405] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] image_cache(vi, tmp_image_ds_loc) [ 1276.194405] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1276.194405] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] vm_util.copy_virtual_disk( [ 1276.194405] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1276.194405] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] session._wait_for_task(vmdk_copy_task) [ 1276.194405] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1276.194405] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] return self.wait_for_task(task_ref) [ 1276.194405] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1276.194405] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] return evt.wait() [ 1276.194405] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1276.194405] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] result = hub.switch() [ 1276.194405] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1276.194405] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] return self.greenlet.switch() [ 1276.194699] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1276.194699] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] self.f(*self.args, **self.kw) [ 1276.194699] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1276.194699] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] raise exceptions.translate_fault(task_info.error) [ 1276.194699] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1276.194699] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] Faults: 
['InvalidArgument'] [ 1276.194699] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] [ 1276.194699] env[62507]: INFO nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Terminating instance [ 1276.195175] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.195428] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1276.195710] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0fc5d646-846d-430c-a5c7-c9446fa234b2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.197978] env[62507]: DEBUG nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1276.198201] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1276.198983] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21dade98-b454-4c5c-853b-29488d768c2d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.206233] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1276.207199] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-810a27cb-8a9d-4d08-9f71-1aa680213347 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.208647] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1276.208817] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 
tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1276.209566] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74ddec15-bea7-49b4-8be2-c96836f44804 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.214257] env[62507]: DEBUG oslo_vmware.api [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Waiting for the task: (returnval){ [ 1276.214257] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52c263a8-f08d-f09d-6c68-76dce9544ff9" [ 1276.214257] env[62507]: _type = "Task" [ 1276.214257] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.221339] env[62507]: DEBUG oslo_vmware.api [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52c263a8-f08d-f09d-6c68-76dce9544ff9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.278500] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1276.278714] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1276.278978] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Deleting the datastore file [datastore2] e682e67f-5a36-4851-b870-7099d7db119d {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1276.279272] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3df4896b-155f-4700-afae-cd7dba4e7501 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.285538] env[62507]: DEBUG oslo_vmware.api [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Waiting for the task: (returnval){ [ 1276.285538] env[62507]: value = "task-2460043" [ 1276.285538] env[62507]: _type = "Task" [ 1276.285538] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.293635] env[62507]: DEBUG oslo_vmware.api [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Task: {'id': task-2460043, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.724794] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1276.725148] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Creating directory with path [datastore2] vmware_temp/251463bd-99e3-4811-be2c-c2b7a3dc33cb/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1276.725263] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bdf92f91-d890-4165-b17c-cbbcd7148bcc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.736586] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Created directory with path [datastore2] vmware_temp/251463bd-99e3-4811-be2c-c2b7a3dc33cb/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1276.736778] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Fetch image to [datastore2] vmware_temp/251463bd-99e3-4811-be2c-c2b7a3dc33cb/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1276.736943] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/251463bd-99e3-4811-be2c-c2b7a3dc33cb/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1276.737697] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54e7c15-f747-4f0a-a2f4-8b866ce0cfc7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.744307] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd97a4ea-1ab1-4b53-83d4-7599d8d294ad {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.753409] env[62507]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646a1e73-7a49-4212-9388-b59d26880832 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.785200] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968a9b15-b6cd-4b12-a52e-c71f71d57332 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.796505] env[62507]: DEBUG oslo_vmware.api [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Task: {'id': task-2460043, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069689} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.796707] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b653f7a3-7cc2-4fd3-9a6f-d8b096f393b5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.798315] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1276.798504] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1276.798675] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1276.798871] env[62507]: INFO nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Took 0.60 seconds to destroy the instance on the hypervisor. 
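The Task: {'id': task-2460043, ...} "progress is 0%" ... "completed successfully" entries above come from a poll-until-terminal loop (oslo_vmware/api.py _poll_task per the file paths in the log, driven by a looping call under eventlet rather than a plain sleep). A generic, self-contained sketch of that pattern, with the TaskInfo shape (state, progress, error) assumed from the fields the log prints:

import time

class TaskFailed(Exception):
    """Raised when the remote task ends in an error state."""

def wait_for_task(get_task_info, interval=0.5):
    # get_task_info: any callable returning an object with .state,
    # .progress and .error attributes, mirroring vSphere TaskInfo.
    while True:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            # The real loop translates the fault class before raising,
            # as in exceptions.translate_fault(task_info.error) above.
            raise TaskFailed(info.error)
        # 'queued' / 'running': report progress and try again.
        print(f"progress is {info.progress or 0}%")
        time.sleep(interval)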
[ 1276.801276] env[62507]: DEBUG nova.compute.claims [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1276.801454] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.801664] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.821058] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1276.875553] env[62507]: DEBUG oslo_vmware.rw_handles [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/251463bd-99e3-4811-be2c-c2b7a3dc33cb/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1276.937329] env[62507]: DEBUG oslo_vmware.rw_handles [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1276.937529] env[62507]: DEBUG oslo_vmware.rw_handles [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/251463bd-99e3-4811-be2c-c2b7a3dc33cb/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1277.196237] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef537577-165f-4a1a-994b-4c632da9f528 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.203875] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fc52b9-4802-43a3-bbb0-4298be252218 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.233973] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d043068-3996-4fea-b77a-085025b367a6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.241170] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5f8826-1a11-4149-bbeb-681d8fc0fc1b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.254714] env[62507]: DEBUG nova.compute.provider_tree [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1277.263653] env[62507]: DEBUG nova.scheduler.client.report [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1277.280373] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.479s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.280899] env[62507]: ERROR nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1277.280899] env[62507]: Faults: ['InvalidArgument'] [ 1277.280899] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] Traceback (most recent call last): [ 1277.280899] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1277.280899] 
env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] self.driver.spawn(context, instance, image_meta, [ 1277.280899] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1277.280899] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1277.280899] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1277.280899] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] self._fetch_image_if_missing(context, vi) [ 1277.280899] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1277.280899] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] image_cache(vi, tmp_image_ds_loc) [ 1277.280899] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1277.281346] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] vm_util.copy_virtual_disk( [ 1277.281346] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1277.281346] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] session._wait_for_task(vmdk_copy_task) [ 1277.281346] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1277.281346] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] return self.wait_for_task(task_ref) [ 1277.281346] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1277.281346] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] return evt.wait() [ 1277.281346] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1277.281346] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] result = hub.switch() [ 1277.281346] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1277.281346] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] return self.greenlet.switch() [ 1277.281346] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1277.281346] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] self.f(*self.args, **self.kw) [ 1277.281611] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1277.281611] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] raise exceptions.translate_fault(task_info.error) [ 1277.281611] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1277.281611] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] Faults: ['InvalidArgument'] [ 1277.281611] env[62507]: ERROR nova.compute.manager [instance: e682e67f-5a36-4851-b870-7099d7db119d] [ 1277.281611] env[62507]: DEBUG nova.compute.utils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1277.282959] env[62507]: DEBUG nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Build of instance e682e67f-5a36-4851-b870-7099d7db119d was re-scheduled: A specified parameter was not correct: fileType [ 1277.282959] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1277.283340] env[62507]: DEBUG nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1277.283512] env[62507]: DEBUG nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1277.283694] env[62507]: DEBUG nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1277.283949] env[62507]: DEBUG nova.network.neutron [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1277.892296] env[62507]: DEBUG nova.network.neutron [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.905976] env[62507]: INFO nova.compute.manager [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Took 0.62 seconds to deallocate network for instance. [ 1278.002467] env[62507]: INFO nova.scheduler.client.report [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Deleted allocations for instance e682e67f-5a36-4851-b870-7099d7db119d [ 1278.025749] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67986fa1-4c6d-482f-bada-0afd7b1b2c01 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Lock "e682e67f-5a36-4851-b870-7099d7db119d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 633.225s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.026993] env[62507]: DEBUG oslo_concurrency.lockutils [None req-06e2d2ed-578c-4342-a9f7-752e3ca3bf06 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Lock "e682e67f-5a36-4851-b870-7099d7db119d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.268s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.027245] env[62507]: DEBUG oslo_concurrency.lockutils [None req-06e2d2ed-578c-4342-a9f7-752e3ca3bf06 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Acquiring lock "e682e67f-5a36-4851-b870-7099d7db119d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.027915] env[62507]: DEBUG oslo_concurrency.lockutils [None req-06e2d2ed-578c-4342-a9f7-752e3ca3bf06 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Lock "e682e67f-5a36-4851-b870-7099d7db119d-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.027915] env[62507]: DEBUG oslo_concurrency.lockutils [None req-06e2d2ed-578c-4342-a9f7-752e3ca3bf06 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Lock "e682e67f-5a36-4851-b870-7099d7db119d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.031052] env[62507]: INFO nova.compute.manager [None req-06e2d2ed-578c-4342-a9f7-752e3ca3bf06 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Terminating instance [ 1278.032680] env[62507]: DEBUG nova.compute.manager [None req-06e2d2ed-578c-4342-a9f7-752e3ca3bf06 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1278.033231] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-06e2d2ed-578c-4342-a9f7-752e3ca3bf06 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1278.033231] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc871a25-641a-433b-9415-54b3e62efdf7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.041721] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dffec01-dbdc-4b56-b9fc-bb24e8ebceb3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.052700] env[62507]: DEBUG nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1278.074700] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-06e2d2ed-578c-4342-a9f7-752e3ca3bf06 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e682e67f-5a36-4851-b870-7099d7db119d could not be found. 
[ 1278.074843] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-06e2d2ed-578c-4342-a9f7-752e3ca3bf06 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1278.075062] env[62507]: INFO nova.compute.manager [None req-06e2d2ed-578c-4342-a9f7-752e3ca3bf06 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1278.075372] env[62507]: DEBUG oslo.service.loopingcall [None req-06e2d2ed-578c-4342-a9f7-752e3ca3bf06 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1278.075885] env[62507]: DEBUG nova.compute.manager [-] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1278.075885] env[62507]: DEBUG nova.network.neutron [-] [instance: e682e67f-5a36-4851-b870-7099d7db119d] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1278.106393] env[62507]: DEBUG nova.network.neutron [-] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.109080] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.109343] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.110820] env[62507]: INFO nova.compute.claims [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1278.120247] env[62507]: INFO nova.compute.manager [-] [instance: e682e67f-5a36-4851-b870-7099d7db119d] Took 0.04 seconds to deallocate network for instance. 
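The "compute_resources" lock bracketing the claim above is what keeps two concurrent builds from both claiming the last free capacity: the resource tracker takes the lock, checks and debits the free pool, then releases it (here after 0.405s). A toy version under stated assumptions, with a stdlib lock standing in for oslo_concurrency and the pool sized from this node's inventory data:

import threading

_compute_resources = threading.Lock()
free = {"VCPU": 48, "MEMORY_MB": 196590 - 512, "DISK_GB": 400}

def instance_claim(requested):
    # "Lock 'compute_resources' acquired by ... instance_claim"
    with _compute_resources:
        if any(free[k] < v for k, v in requested.items()):
            raise RuntimeError("insufficient resources, claim rejected")
        for k, v in requested.items():
            free[k] -= v          # debit atomically w.r.t. other claimants
    # lock released on exit ("'released' ... held 0.405s")

instance_claim({"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1})   # an m1.nano-sized claim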
[ 1278.210040] env[62507]: DEBUG oslo_concurrency.lockutils [None req-06e2d2ed-578c-4342-a9f7-752e3ca3bf06 tempest-InstanceActionsTestJSON-1144674175 tempest-InstanceActionsTestJSON-1144674175-project-member] Lock "e682e67f-5a36-4851-b870-7099d7db119d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.183s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.211010] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "e682e67f-5a36-4851-b870-7099d7db119d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 166.791s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.211010] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: e682e67f-5a36-4851-b870-7099d7db119d] During sync_power_state the instance has a pending task (deleting). Skip. [ 1278.211010] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "e682e67f-5a36-4851-b870-7099d7db119d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.432034] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143413f2-00a0-44cd-9ec5-cb90772f4f3e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.439742] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89da225e-dec3-41b0-9fda-71cb7e2cc21b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.470245] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0133e02f-3663-4219-ac5d-ad9f03f00053 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.477363] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42f264f-bfdc-4757-97dc-8d8281e38fa6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.490501] env[62507]: DEBUG nova.compute.provider_tree [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.501283] env[62507]: DEBUG nova.scheduler.client.report [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1278.514639] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.405s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.515162] env[62507]: DEBUG nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1278.545489] env[62507]: DEBUG nova.compute.utils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1278.546838] env[62507]: DEBUG nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1278.547038] env[62507]: DEBUG nova.network.neutron [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1278.556902] env[62507]: DEBUG nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1278.605248] env[62507]: DEBUG nova.policy [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8b8e0669ec4494fb0305760e24521ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53a7bfe145d144548ae019ad65d21ce9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1278.617669] env[62507]: DEBUG nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Start spawning the instance on the hypervisor. 
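"Using /dev/sd instead of None" above is the device-name fallback: the block device mapping arrived without a requested name, so Nova falls back to the /dev/sd prefix and picks the next unused letter. A simplified stand-in for that helper; the real get_next_device_name in nova/compute/utils.py also handles multi-letter suffixes and mixed prefixes:

import string

def next_device_name(used, prefix="/dev/sd"):
    """Return the first unused name in order: /dev/sda, /dev/sdb, ..."""
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise RuntimeError("no single-letter device names left")

assert next_device_name(set()) == "/dev/sda"
assert next_device_name({"/dev/sda", "/dev/sdb"}) == "/dev/sdc"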
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1278.643212] env[62507]: DEBUG nova.virt.hardware [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1278.643467] env[62507]: DEBUG nova.virt.hardware [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1278.643626] env[62507]: DEBUG nova.virt.hardware [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1278.643809] env[62507]: DEBUG nova.virt.hardware [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1278.643958] env[62507]: DEBUG nova.virt.hardware [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1278.644130] env[62507]: DEBUG nova.virt.hardware [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1278.644345] env[62507]: DEBUG nova.virt.hardware [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1278.644502] env[62507]: DEBUG nova.virt.hardware [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
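The hardware records above are a complete walk of Nova's CPU-topology selection for this flavor: flavor and image set no limits or preferences (0:0:0), the defaults cap sockets, cores, and threads at 65536 each, and for a single vCPU exactly one factorization survives. A simplified model of the enumeration step; Nova's actual code in nova/virt/hardware.py filters and sorts the candidates more carefully:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product is vcpus."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)], matching "Got 1 possible topologies"
print(possible_topologies(4))   # several triples, e.g. (1, 2, 2), (4, 1, 1), ...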
[ 1278.644670] env[62507]: DEBUG nova.virt.hardware [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1278.644835] env[62507]: DEBUG nova.virt.hardware [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1278.645018] env[62507]: DEBUG nova.virt.hardware [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1278.645872] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ce4278-7563-4a15-a665-d79c69eabfb3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.654134] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03f809d-7863-4977-b39c-dea05799fd8a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.989051] env[62507]: DEBUG nova.network.neutron [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Successfully created port: 83b6101a-474f-4b3c-9e85-2a12cf0d7377 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1279.710937] env[62507]: DEBUG nova.network.neutron [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Successfully updated port: 83b6101a-474f-4b3c-9e85-2a12cf0d7377 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1279.721870] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "refresh_cache-f4f51028-a313-4d17-bcf1-4decec2d3c3d" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1279.722043] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquired lock "refresh_cache-f4f51028-a313-4d17-bcf1-4decec2d3c3d" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.722327] env[62507]: DEBUG nova.network.neutron [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1279.766968] 
env[62507]: DEBUG nova.network.neutron [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1279.925793] env[62507]: DEBUG nova.network.neutron [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Updating instance_info_cache with network_info: [{"id": "83b6101a-474f-4b3c-9e85-2a12cf0d7377", "address": "fa:16:3e:be:02:f9", "network": {"id": "317818e4-3465-4932-901e-96246fc172b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1405826547-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53a7bfe145d144548ae019ad65d21ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b6101a-47", "ovs_interfaceid": "83b6101a-474f-4b3c-9e85-2a12cf0d7377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.928982] env[62507]: DEBUG nova.compute.manager [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Received event network-vif-plugged-83b6101a-474f-4b3c-9e85-2a12cf0d7377 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1279.929215] env[62507]: DEBUG oslo_concurrency.lockutils [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] Acquiring lock "f4f51028-a313-4d17-bcf1-4decec2d3c3d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.929419] env[62507]: DEBUG oslo_concurrency.lockutils [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] Lock "f4f51028-a313-4d17-bcf1-4decec2d3c3d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.929591] env[62507]: DEBUG oslo_concurrency.lockutils [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] Lock "f4f51028-a313-4d17-bcf1-4decec2d3c3d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.929753] env[62507]: 
DEBUG nova.compute.manager [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] No waiting events found dispatching network-vif-plugged-83b6101a-474f-4b3c-9e85-2a12cf0d7377 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1279.929918] env[62507]: WARNING nova.compute.manager [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Received unexpected event network-vif-plugged-83b6101a-474f-4b3c-9e85-2a12cf0d7377 for instance with vm_state building and task_state spawning. [ 1279.930097] env[62507]: DEBUG nova.compute.manager [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Received event network-changed-83b6101a-474f-4b3c-9e85-2a12cf0d7377 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1279.930262] env[62507]: DEBUG nova.compute.manager [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Refreshing instance network info cache due to event network-changed-83b6101a-474f-4b3c-9e85-2a12cf0d7377. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1279.930430] env[62507]: DEBUG oslo_concurrency.lockutils [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] Acquiring lock "refresh_cache-f4f51028-a313-4d17-bcf1-4decec2d3c3d" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1279.936881] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Releasing lock "refresh_cache-f4f51028-a313-4d17-bcf1-4decec2d3c3d" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1279.937240] env[62507]: DEBUG nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Instance network_info: |[{"id": "83b6101a-474f-4b3c-9e85-2a12cf0d7377", "address": "fa:16:3e:be:02:f9", "network": {"id": "317818e4-3465-4932-901e-96246fc172b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1405826547-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53a7bfe145d144548ae019ad65d21ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b6101a-47", "ovs_interfaceid": "83b6101a-474f-4b3c-9e85-2a12cf0d7377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1279.937435] env[62507]: DEBUG oslo_concurrency.lockutils [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] Acquired lock "refresh_cache-f4f51028-a313-4d17-bcf1-4decec2d3c3d" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.937609] env[62507]: DEBUG nova.network.neutron [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Refreshing network info cache for port 83b6101a-474f-4b3c-9e85-2a12cf0d7377 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1279.940804] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:02:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f54f7284-8f7d-47ee-839d-2143062cfe44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83b6101a-474f-4b3c-9e85-2a12cf0d7377', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1279.948049] env[62507]: DEBUG oslo.service.loopingcall [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1279.948659] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1279.951110] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d44d250-17b4-4c3b-86e7-a44d90fe7d07 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.971374] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1279.971374] env[62507]: value = "task-2460044" [ 1279.971374] env[62507]: _type = "Task" [ 1279.971374] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.979426] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460044, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.246939] env[62507]: DEBUG nova.network.neutron [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Updated VIF entry in instance network info cache for port 83b6101a-474f-4b3c-9e85-2a12cf0d7377. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1280.247329] env[62507]: DEBUG nova.network.neutron [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Updating instance_info_cache with network_info: [{"id": "83b6101a-474f-4b3c-9e85-2a12cf0d7377", "address": "fa:16:3e:be:02:f9", "network": {"id": "317818e4-3465-4932-901e-96246fc172b8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1405826547-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53a7bfe145d144548ae019ad65d21ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b6101a-47", "ovs_interfaceid": "83b6101a-474f-4b3c-9e85-2a12cf0d7377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.257572] env[62507]: DEBUG oslo_concurrency.lockutils [req-ebaff098-4c71-47bb-a691-0f5ce04adbbc req-ef7fee25-e409-48d5-865e-3b9df2f10cac service nova] Releasing lock "refresh_cache-f4f51028-a313-4d17-bcf1-4decec2d3c3d" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1280.481689] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460044, 'name': CreateVM_Task, 'duration_secs': 0.436187} completed successfully. 
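CreateVM_Task above is driven by the submit-then-poll pattern: the API call returns a task handle ("task-2460044") and the caller polls its state on an interval until it succeeds or faults (here, from "progress is 0%" to completed in about 0.44s). A generic sketch of such a poll loop, not oslo.vmware's actual implementation:

import time

def wait_for_task(poll_state, interval=0.5, timeout=300.0):
    """poll_state() -> ('running', pct) | ('success', result) | ('error', msg)."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, payload = poll_state()
        if state == "success":
            return payload
        if state == "error":
            raise RuntimeError(payload)   # cf. translate_fault in the trace earlier
        time.sleep(interval)              # still running; poll again
    raise TimeoutError("task did not complete in time")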
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.481779] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1280.482442] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.482611] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.482929] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1280.483212] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ffc5661-9372-4407-bf07-fe1c7ab92431 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.487758] env[62507]: DEBUG oslo_vmware.api [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Waiting for the task: (returnval){ [ 1280.487758] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f950f4-7a6a-6103-856e-311645957a43" [ 1280.487758] env[62507]: _type = "Task" [ 1280.487758] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.495211] env[62507]: DEBUG oslo_vmware.api [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f950f4-7a6a-6103-856e-311645957a43, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.997724] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1280.998038] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1280.998242] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1284.004963] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c5df889c-ed99-4210-b939-3b4cbd693abe tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "f4f51028-a313-4d17-bcf1-4decec2d3c3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1285.840323] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1285.840611] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.167587] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.708205] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "ef5633ea-273d-429f-9a02-326711b73bab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.708432] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "ef5633ea-273d-429f-9a02-326711b73bab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.167773] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1288.168104] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1288.168137] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1288.189745] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1288.189913] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1288.190438] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1288.190629] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1288.190762] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1288.190891] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1288.191037] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1288.191177] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Skipping network cache update for instance because it is Building. 
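The heal pass above first rebuilds its candidate list and skips every instance that is still Building, since those have no settled network info to refresh yet; with nothing left it reports that it found no instances to update. A compact stand-in for that filtering step:

def instances_to_heal(instances, log):
    """instances: iterable of (uuid, vm_state) pairs; returns healable uuids."""
    healable = []
    for uuid, vm_state in instances:
        if vm_state == "building":
            log("[instance: %s] Skipping network cache update for instance "
                "because it is Building." % uuid)
            continue
        healable.append(uuid)
    if not healable:
        log("Didn't find any instances for network info cache update.")
    return healable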
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1288.191302] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1288.191424] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1288.191547] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1288.192083] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1289.168242] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1289.168538] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1290.167846] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.167820] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.167594] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.182213] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.182450] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.182618] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.182772] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1293.183942] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-befc4d28-3329-432e-a607-f232865031ec {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.192621] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2e6fd6-347d-46d6-ac32-2675ba03f046 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.207320] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-decd4fca-cccc-48c8-b5e8-3bade8316d94 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.213862] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd8d9de-8f72-4a48-8b6b-aab50924c520 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.242583] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181159MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1293.242743] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.242936] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.345525] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a9b1ef96-1409-4700-a1bb-4aec1691a0fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.345695] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01d865c8-ed85-45ec-aac6-bf923cd52dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.346653] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.346653] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.346653] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b53bed7e-5e76-4aa5-abe2-b05750497404 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.346653] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1fac8aa4-37a9-4f94-8050-b338cd2cd182 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.346806] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3627bbf7-507f-4345-b093-3b4f5bb45eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.346806] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d888bcb9-89ef-41aa-b637-e2a15efd0ce8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.346806] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.346806] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f4f51028-a313-4d17-bcf1-4decec2d3c3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.361368] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 65efc608-6573-4690-8d11-2f0459647d70 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1293.373560] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e04f6623-4547-4095-a575-67eae0bbd289 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1293.386034] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 0600b5ad-334a-41e3-add2-4f8458040774 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1293.399673] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b35972e0-4a7b-4b2e-940e-2d9f40b0e55f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1293.412867] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef76c5ad-176d-44c0-891a-66333b42d0ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1293.426504] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a06c8cb4-ac81-44bf-bf6b-fa6c75fcb369 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1293.443586] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1293.457017] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance df12d1b1-3c2a-47f8-b8df-d9993acf8d82 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1293.470987] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7d0d04-1c97-40e1-824c-25d04f87e468 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1293.481924] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 955e2c90-e317-4148-887d-e9a4eacdda2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1293.493222] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01043570-d72d-4a97-8c51-cfe30b25b82b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1293.505380] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef5633ea-273d-429f-9a02-326711b73bab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
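The audit records above, together with the "Final resource view" totals reported just below, are mutually consistent: ten actively managed instances each hold 1 VCPU / 128 MB / 1 GB, scheduled-but-unstarted instances are skipped, and 512 MB of host memory is reserved, which reproduces used_vcpus=10, used_ram=1792MB, and used_disk=10GB. The arithmetic, spelled out:

active_instances = 10                                  # instances with allocations listed above
flavor = {"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}   # m1.nano-sized claims
reserved_mb = 512                                      # reserved memory from the inventory data

used_vcpus = active_instances * flavor["VCPU"]                    # 10
used_ram = active_instances * flavor["MEMORY_MB"] + reserved_mb   # 10 * 128 + 512 = 1792
used_disk = active_instances * flavor["DISK_GB"]                  # 10

print(f"used_ram={used_ram}MB used_disk={used_disk}GB used_vcpus={used_vcpus}")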
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1293.505522] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1293.505743] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1293.808310] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e769a078-07fe-4a8d-a359-0e54a946f996 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.816063] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d170d5-5f9b-4e2b-8edb-b133ba8c984d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.855484] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ced211b-baea-495d-ae22-c730411f7b20 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.865494] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c53fde5-a826-4a63-ba56-1cb870ad9440 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.884326] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1293.895948] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1293.912344] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1293.912549] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.670s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.839941] env[62507]: WARNING oslo_vmware.rw_handles [None 
req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1324.839941] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1324.839941] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1324.839941] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1324.839941] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1324.839941] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1324.839941] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1324.839941] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1324.839941] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1324.839941] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1324.839941] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1324.839941] env[62507]: ERROR oslo_vmware.rw_handles [ 1324.840673] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/251463bd-99e3-4811-be2c-c2b7a3dc33cb/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1324.842290] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1324.842541] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Copying Virtual Disk [datastore2] vmware_temp/251463bd-99e3-4811-be2c-c2b7a3dc33cb/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/251463bd-99e3-4811-be2c-c2b7a3dc33cb/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1324.842841] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf63e83a-cc3d-4f9c-a083-fa96eaa9c6a8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.850718] env[62507]: DEBUG oslo_vmware.api [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Waiting for the task: (returnval){ [ 1324.850718] env[62507]: value = "task-2460045" [ 1324.850718] env[62507]: _type = "Task" [ 1324.850718] env[62507]: } to 
complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.858795] env[62507]: DEBUG oslo_vmware.api [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Task: {'id': task-2460045, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.360784] env[62507]: DEBUG oslo_vmware.exceptions [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1325.361058] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1325.361605] env[62507]: ERROR nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1325.361605] env[62507]: Faults: ['InvalidArgument'] [ 1325.361605] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Traceback (most recent call last): [ 1325.361605] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1325.361605] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] yield resources [ 1325.361605] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1325.361605] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] self.driver.spawn(context, instance, image_meta, [ 1325.361605] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1325.361605] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1325.361605] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1325.361605] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] self._fetch_image_if_missing(context, vi) [ 1325.361605] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1325.361984] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] image_cache(vi, tmp_image_ds_loc) [ 1325.361984] env[62507]: ERROR nova.compute.manager [instance: 
a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1325.361984] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] vm_util.copy_virtual_disk( [ 1325.361984] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1325.361984] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] session._wait_for_task(vmdk_copy_task) [ 1325.361984] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1325.361984] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] return self.wait_for_task(task_ref) [ 1325.361984] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1325.361984] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] return evt.wait() [ 1325.361984] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1325.361984] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] result = hub.switch() [ 1325.361984] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1325.361984] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] return self.greenlet.switch() [ 1325.362426] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1325.362426] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] self.f(*self.args, **self.kw) [ 1325.362426] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1325.362426] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] raise exceptions.translate_fault(task_info.error) [ 1325.362426] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1325.362426] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Faults: ['InvalidArgument'] [ 1325.362426] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] [ 1325.362426] env[62507]: INFO nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Terminating instance [ 1325.363064] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.363277] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1325.363613] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0704a1dd-5468-48a1-b824-3d15aab2e212 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.367082] env[62507]: DEBUG nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1325.367281] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1325.367992] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935684de-4ae7-4049-afbf-23d3436b7a23 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.371775] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1325.371947] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1325.374430] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0eb5f0e4-b706-47a2-aa12-59d7bbaeae7d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.376501] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1325.377010] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41bc86ba-b82c-4ce5-a19e-0b63e5c1313d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.380975] env[62507]: DEBUG oslo_vmware.api [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Waiting for the task: (returnval){ [ 1325.380975] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52d0e3fd-05f7-98a0-69c2-98f04b45cc78" [ 1325.380975] env[62507]: _type = "Task" [ 1325.380975] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.391986] env[62507]: DEBUG oslo_vmware.api [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52d0e3fd-05f7-98a0-69c2-98f04b45cc78, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.440362] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1325.440510] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1325.440674] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Deleting the datastore file [datastore2] a9b1ef96-1409-4700-a1bb-4aec1691a0fd {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1325.441233] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a5e3afe-f9f7-40ef-aa84-69877499dd44 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.447296] env[62507]: DEBUG oslo_vmware.api [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Waiting for the task: (returnval){ [ 1325.447296] env[62507]: value = "task-2460047" [ 1325.447296] env[62507]: _type = "Task" [ 1325.447296] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.454984] env[62507]: DEBUG oslo_vmware.api [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Task: {'id': task-2460047, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.891677] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1325.891944] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Creating directory with path [datastore2] vmware_temp/5f9663e8-96c5-4126-8917-3879a8008397/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1325.892135] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a027e022-0791-4e6a-b87c-8f02a91e0049 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.903484] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Created directory with path [datastore2] vmware_temp/5f9663e8-96c5-4126-8917-3879a8008397/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1325.903884] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Fetch image to [datastore2] vmware_temp/5f9663e8-96c5-4126-8917-3879a8008397/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1325.903884] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/5f9663e8-96c5-4126-8917-3879a8008397/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1325.904587] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830e3b2d-5a11-4613-acf0-6473a5cac267 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.911207] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295e9b24-f773-47ce-a036-6a35e52d2884 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.919882] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57727375-6d75-4557-9a8f-a31fa24ad1e4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.954041] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-841909b8-8d26-420c-a34a-b5cd310e83b9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.960916] env[62507]: DEBUG oslo_vmware.api [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Task: {'id': task-2460047, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079032} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.962286] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1325.962478] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1325.962652] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1325.962830] env[62507]: INFO nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Took 0.60 seconds to destroy the instance on the hypervisor. 
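The DeleteDatastoreFile_Task sequence above follows the pattern used for every vCenter task in this log: invoke the *_Task SOAP method, get a task handle back, then poll it until it reports success or error, translating a server-side fault (such as the InvalidArgument/fileType fault earlier) into a Python exception. A minimal sketch of that polling loop follows; get_task_info and TaskFailed are hypothetical stand-ins for the vSphere PropertyCollector read and oslo.vmware's fault mapping, not the real oslo.vmware API.

import time

class TaskFailed(Exception):
    """Raised when the server-side task ends in an error state."""

def wait_for_task(get_task_info, poll_interval=0.5):
    # get_task_info() is assumed to return a dict like
    # {'state': 'running'|'success'|'error', 'progress': int, 'error': str|None}
    while True:
        info = get_task_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            # Mirrors the "raise exceptions.translate_fault(task_info.error)"
            # frame seen in the tracebacks above.
            raise TaskFailed(info['error'])
        # Corresponds to the "Task: {...} progress is N%." debug lines.
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)

if __name__ == "__main__":
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'success', 'progress': 100}])
    print(wait_for_task(lambda: next(states), poll_interval=0))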
[ 1325.964587] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9b78ee16-f01d-4e6a-8762-1ce24327357a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.966433] env[62507]: DEBUG nova.compute.claims [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1325.966607] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.966818] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.992142] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1326.050202] env[62507]: DEBUG oslo_vmware.rw_handles [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5f9663e8-96c5-4126-8917-3879a8008397/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1326.114778] env[62507]: DEBUG oslo_vmware.rw_handles [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1326.115056] env[62507]: DEBUG oslo_vmware.rw_handles [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5f9663e8-96c5-4126-8917-3879a8008397/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1326.318016] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f619d637-0ce0-4e2a-9f5b-6ea60b53a279 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.325629] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b636e6b3-c25b-4334-b0c8-5867b8aaf047 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.355774] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb05db0-7e82-4967-99f4-b26b2dd7a579 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.362241] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd918c4-a569-4ec3-8af7-7dc969104e3a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.374591] env[62507]: DEBUG nova.compute.provider_tree [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1326.384435] env[62507]: DEBUG nova.scheduler.client.report [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1326.399360] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.432s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.399828] env[62507]: ERROR nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1326.399828] env[62507]: Faults: ['InvalidArgument'] [ 1326.399828] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Traceback (most recent call last): [ 1326.399828] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1326.399828] 
env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] self.driver.spawn(context, instance, image_meta, [ 1326.399828] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1326.399828] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1326.399828] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1326.399828] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] self._fetch_image_if_missing(context, vi) [ 1326.399828] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1326.399828] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] image_cache(vi, tmp_image_ds_loc) [ 1326.399828] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1326.400135] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] vm_util.copy_virtual_disk( [ 1326.400135] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1326.400135] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] session._wait_for_task(vmdk_copy_task) [ 1326.400135] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1326.400135] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] return self.wait_for_task(task_ref) [ 1326.400135] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1326.400135] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] return evt.wait() [ 1326.400135] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1326.400135] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] result = hub.switch() [ 1326.400135] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1326.400135] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] return self.greenlet.switch() [ 1326.400135] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1326.400135] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] self.f(*self.args, **self.kw) [ 1326.400472] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1326.400472] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] raise exceptions.translate_fault(task_info.error) [ 1326.400472] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1326.400472] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Faults: ['InvalidArgument'] [ 1326.400472] env[62507]: ERROR nova.compute.manager [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] [ 1326.400607] env[62507]: DEBUG nova.compute.utils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1326.401887] env[62507]: DEBUG nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Build of instance a9b1ef96-1409-4700-a1bb-4aec1691a0fd was re-scheduled: A specified parameter was not correct: fileType [ 1326.401887] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1326.402275] env[62507]: DEBUG nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1326.402455] env[62507]: DEBUG nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1326.402630] env[62507]: DEBUG nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1326.402793] env[62507]: DEBUG nova.network.neutron [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1326.912425] env[62507]: DEBUG nova.network.neutron [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.923464] env[62507]: INFO nova.compute.manager [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Took 0.52 seconds to deallocate network for instance. [ 1327.044184] env[62507]: INFO nova.scheduler.client.report [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Deleted allocations for instance a9b1ef96-1409-4700-a1bb-4aec1691a0fd [ 1327.065111] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f6dd7c06-c431-4678-80ef-4215087b7d38 tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 628.332s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.066439] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5a5d21f1-511d-47ba-80be-9b32d07de63c tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 431.417s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.066713] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5a5d21f1-511d-47ba-80be-9b32d07de63c tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Acquiring lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.066987] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5a5d21f1-511d-47ba-80be-9b32d07de63c tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.067146] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5a5d21f1-511d-47ba-80be-9b32d07de63c tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.069217] env[62507]: INFO nova.compute.manager [None req-5a5d21f1-511d-47ba-80be-9b32d07de63c tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Terminating instance [ 1327.071032] env[62507]: DEBUG nova.compute.manager [None req-5a5d21f1-511d-47ba-80be-9b32d07de63c tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1327.071277] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5d21f1-511d-47ba-80be-9b32d07de63c tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1327.071705] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97fb89e0-6229-4058-8566-2611c1f3e470 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.081391] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42047940-ca97-4123-8ef5-1cc61a2be040 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.091900] env[62507]: DEBUG nova.compute.manager [None req-09418f37-2639-41f1-9799-fdefa079324f tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 0bc7792e-d291-46ef-9ac1-420959c38191] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1327.113689] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-5a5d21f1-511d-47ba-80be-9b32d07de63c tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a9b1ef96-1409-4700-a1bb-4aec1691a0fd could not be found. 
[ 1327.113986] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5d21f1-511d-47ba-80be-9b32d07de63c tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1327.114090] env[62507]: INFO nova.compute.manager [None req-5a5d21f1-511d-47ba-80be-9b32d07de63c tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1327.114333] env[62507]: DEBUG oslo.service.loopingcall [None req-5a5d21f1-511d-47ba-80be-9b32d07de63c tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1327.114570] env[62507]: DEBUG nova.compute.manager [-] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1327.114667] env[62507]: DEBUG nova.network.neutron [-] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1327.117023] env[62507]: DEBUG nova.compute.manager [None req-09418f37-2639-41f1-9799-fdefa079324f tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 0bc7792e-d291-46ef-9ac1-420959c38191] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1327.136570] env[62507]: DEBUG oslo_concurrency.lockutils [None req-09418f37-2639-41f1-9799-fdefa079324f tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "0bc7792e-d291-46ef-9ac1-420959c38191" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 239.340s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.143804] env[62507]: DEBUG nova.network.neutron [-] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.149914] env[62507]: DEBUG nova.compute.manager [None req-ccd929f6-3a22-4960-aa73-75d6678ca2ff tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: ffd1c74f-b08a-4f59-bc99-4a6910dbe1ac] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1327.152562] env[62507]: INFO nova.compute.manager [-] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] Took 0.04 seconds to deallocate network for instance. [ 1327.171185] env[62507]: DEBUG nova.compute.manager [None req-ccd929f6-3a22-4960-aa73-75d6678ca2ff tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] [instance: ffd1c74f-b08a-4f59-bc99-4a6910dbe1ac] Instance disappeared before build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1327.191094] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ccd929f6-3a22-4960-aa73-75d6678ca2ff tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Lock "ffd1c74f-b08a-4f59-bc99-4a6910dbe1ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.280s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.200346] env[62507]: DEBUG nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1327.240407] env[62507]: DEBUG oslo_concurrency.lockutils [None req-5a5d21f1-511d-47ba-80be-9b32d07de63c tempest-VolumesAdminNegativeTest-630820101 tempest-VolumesAdminNegativeTest-630820101-project-member] Lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.242673] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 215.823s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.242673] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a9b1ef96-1409-4700-a1bb-4aec1691a0fd] During sync_power_state the instance has a pending task (deleting). Skip. 
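The "pending task (deleting). Skip." record above shows the guard in the periodic power-state sync: an instance that still has a task in flight is left alone so the sync cannot race the ongoing operation. A minimal, hypothetical sketch of that decision, not Nova's actual implementation:

def sync_power_states(instances, query_driver_power_state):
    for inst in instances:
        if inst.get('task_state'):  # e.g. 'deleting' -> skip this pass
            print(f"[instance: {inst['uuid']}] pending task "
                  f"({inst['task_state']}), skip")
            continue
        driver_state = query_driver_power_state(inst['uuid'])
        if driver_state != inst['power_state']:
            # Nova would reconcile the DB/driver mismatch here.
            print(f"[instance: {inst['uuid']}] power state out of sync")

sync_power_states(
    [{'uuid': 'a9b1ef96-1409-4700-a1bb-4aec1691a0fd',
      'task_state': 'deleting', 'power_state': 'running'}],
    query_driver_power_state=lambda uuid: 'running')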
[ 1327.242673] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "a9b1ef96-1409-4700-a1bb-4aec1691a0fd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.251931] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.252188] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.253657] env[62507]: INFO nova.compute.claims [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1327.530861] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be711b0-e7ba-45db-a71e-7f004c21b2b1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.538417] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e2c570-30ab-46a5-8a80-6242d4078f31 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.567601] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ea93a5-1279-4e1d-9471-3ea43d532e29 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.574186] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7733f544-5f2a-4fc9-a3a3-12a6bd9b6600 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.586607] env[62507]: DEBUG nova.compute.provider_tree [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.594732] env[62507]: DEBUG nova.scheduler.client.report [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1327.607683] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.355s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.608183] env[62507]: DEBUG nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1327.643969] env[62507]: DEBUG nova.compute.utils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1327.644074] env[62507]: DEBUG nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1327.644240] env[62507]: DEBUG nova.network.neutron [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1327.652907] env[62507]: DEBUG nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1327.717028] env[62507]: DEBUG nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Start spawning the instance on the hypervisor. 
[ 1327.742455] env[62507]: DEBUG nova.policy [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '51f1ac6b24e84840bc58ad2e78d890d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6ccbf7c4c744d8ab5858bf4ece0896c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1327.744870] env[62507]: DEBUG nova.virt.hardware [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1327.746034] env[62507]: DEBUG nova.virt.hardware [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1327.746034] env[62507]: DEBUG nova.virt.hardware [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1327.746034] env[62507]: DEBUG nova.virt.hardware [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1327.746034] env[62507]: DEBUG nova.virt.hardware [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1327.746034] env[62507]: DEBUG nova.virt.hardware [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1327.746255] env[62507]: DEBUG nova.virt.hardware [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1327.746255] env[62507]: DEBUG nova.virt.hardware [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1327.746255] env[62507]: DEBUG nova.virt.hardware [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1327.750025] env[62507]: DEBUG nova.virt.hardware [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1327.750025] env[62507]: DEBUG nova.virt.hardware [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1327.750025] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afc08d1-7e1f-4e32-bac2-cdada7919435 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1327.757084] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b304ed-299c-41b8-a2b2-4e521c9f7df4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1328.123678] env[62507]: DEBUG nova.network.neutron [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Successfully created port: d0e504f4-6e57-4ee2-9333-8b8fbff71253 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1328.739456] env[62507]: DEBUG nova.network.neutron [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Successfully updated port: d0e504f4-6e57-4ee2-9333-8b8fbff71253 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1328.756535] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Acquiring lock "refresh_cache-65efc608-6573-4690-8d11-2f0459647d70" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1328.757939] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Acquired lock "refresh_cache-65efc608-6573-4690-8d11-2f0459647d70" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
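The nova.virt.hardware entries above trace the CPU-topology selection end to end: no flavor or image preference (0:0:0), a wide-open maximum of 65536 sockets/cores/threads, and a one-vCPU flavor, so exactly one candidate topology survives, 1:1:1. A simplified re-creation of that enumeration (plain Python sketch; Nova's real _get_possible_cpu_topologies applies more constraints and preference sorting):

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # A factor of vcpus can never exceed vcpus, so cap each axis accordingly
        # instead of walking the full 65536-wide limits.
        found = []
        for s in range(1, min(max_sockets, vcpus) + 1):
            for c in range(1, min(max_cores, vcpus) + 1):
                for t in range(1, min(max_threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        found.append((s, c, t))
        return found

    print(possible_topologies(1, 65536, 65536, 65536))
    # [(1, 1, 1)] -- the single "Sorted desired topologies" result logged above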
[ 1328.758191] env[62507]: DEBUG nova.network.neutron [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1328.812393] env[62507]: DEBUG nova.network.neutron [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1329.063618] env[62507]: DEBUG nova.network.neutron [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Updating instance_info_cache with network_info: [{"id": "d0e504f4-6e57-4ee2-9333-8b8fbff71253", "address": "fa:16:3e:a4:3f:f7", "network": {"id": "6793e2b6-f6de-4b13-bb64-fffe633271e2", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-660033301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6ccbf7c4c744d8ab5858bf4ece0896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0e504f4-6e", "ovs_interfaceid": "d0e504f4-6e57-4ee2-9333-8b8fbff71253", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1329.074945] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Releasing lock "refresh_cache-65efc608-6573-4690-8d11-2f0459647d70" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1329.075282] env[62507]: DEBUG nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Instance network_info: |[{"id": "d0e504f4-6e57-4ee2-9333-8b8fbff71253", "address": "fa:16:3e:a4:3f:f7", "network": {"id": "6793e2b6-f6de-4b13-bb64-fffe633271e2", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-660033301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6ccbf7c4c744d8ab5858bf4ece0896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0e504f4-6e", "ovs_interfaceid": "d0e504f4-6e57-4ee2-9333-8b8fbff71253", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1329.075697] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:3f:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0e504f4-6e57-4ee2-9333-8b8fbff71253', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1329.084725] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Creating folder: Project (d6ccbf7c4c744d8ab5858bf4ece0896c). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1329.085192] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-155147e1-cbab-4b09-a7e9-658e2a152db7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.097075] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Created folder: Project (d6ccbf7c4c744d8ab5858bf4ece0896c) in parent group-v497991.
[ 1329.097271] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Creating folder: Instances. Parent ref: group-v498074. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1329.097496] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e017c11e-5e99-4efa-bbb5-79686eaeb3ef {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.106654] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Created folder: Instances in parent group-v498074.
[ 1329.106900] env[62507]: DEBUG oslo.service.loopingcall [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1329.107101] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1329.107301] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-542d9829-470f-441d-9bd7-f112e55c9687 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.125786] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1329.125786] env[62507]: value = "task-2460050"
[ 1329.125786] env[62507]: _type = "Task"
[ 1329.125786] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1329.130831] env[62507]: DEBUG nova.compute.manager [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Received event network-vif-plugged-d0e504f4-6e57-4ee2-9333-8b8fbff71253 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1329.131049] env[62507]: DEBUG oslo_concurrency.lockutils [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] Acquiring lock "65efc608-6573-4690-8d11-2f0459647d70-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1329.131262] env[62507]: DEBUG oslo_concurrency.lockutils [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] Lock "65efc608-6573-4690-8d11-2f0459647d70-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1329.131434] env[62507]: DEBUG oslo_concurrency.lockutils [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] Lock "65efc608-6573-4690-8d11-2f0459647d70-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1329.131600] env[62507]: DEBUG nova.compute.manager [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] [instance: 65efc608-6573-4690-8d11-2f0459647d70] No waiting events found dispatching network-vif-plugged-d0e504f4-6e57-4ee2-9333-8b8fbff71253 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1329.131765] env[62507]: WARNING nova.compute.manager [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Received unexpected event network-vif-plugged-d0e504f4-6e57-4ee2-9333-8b8fbff71253 for instance with vm_state building and task_state spawning.
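The "No waiting events found ... Received unexpected event" pair above is a benign race: Neutron reported network-vif-plugged before the driver registered a waiter for it. A toy version of the pop-or-warn dispatch pattern (threading-based sketch; Nova's InstanceEvents machinery is considerably more involved):

    import threading

    # (instance_uuid, event_name) -> Event; a waiter registers *before*
    # triggering the action that will emit the event.
    _waiters = {}
    _mutex = threading.Lock()

    def prepare_for_event(instance_uuid, event_name):
        ev = threading.Event()
        with _mutex:
            _waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch_event(instance_uuid, event_name):
        with _mutex:
            ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # Nobody was waiting yet: log and drop, the same benign
            # "Received unexpected event" outcome seen above.
            print(f"Received unexpected event {event_name} for {instance_uuid}")
        else:
            ev.set()

    dispatch_event('65efc608-6573-4690-8d11-2f0459647d70',
                   'network-vif-plugged-d0e504f4-6e57-4ee2-9333-8b8fbff71253')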
[ 1329.131927] env[62507]: DEBUG nova.compute.manager [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Received event network-changed-d0e504f4-6e57-4ee2-9333-8b8fbff71253 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1329.132140] env[62507]: DEBUG nova.compute.manager [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Refreshing instance network info cache due to event network-changed-d0e504f4-6e57-4ee2-9333-8b8fbff71253. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 1329.132339] env[62507]: DEBUG oslo_concurrency.lockutils [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] Acquiring lock "refresh_cache-65efc608-6573-4690-8d11-2f0459647d70" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1329.132477] env[62507]: DEBUG oslo_concurrency.lockutils [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] Acquired lock "refresh_cache-65efc608-6573-4690-8d11-2f0459647d70" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1329.132637] env[62507]: DEBUG nova.network.neutron [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Refreshing network info cache for port d0e504f4-6e57-4ee2-9333-8b8fbff71253 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1329.138517] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460050, 'name': CreateVM_Task} progress is 5%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1329.418085] env[62507]: DEBUG nova.network.neutron [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Updated VIF entry in instance network info cache for port d0e504f4-6e57-4ee2-9333-8b8fbff71253. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1329.418526] env[62507]: DEBUG nova.network.neutron [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Updating instance_info_cache with network_info: [{"id": "d0e504f4-6e57-4ee2-9333-8b8fbff71253", "address": "fa:16:3e:a4:3f:f7", "network": {"id": "6793e2b6-f6de-4b13-bb64-fffe633271e2", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-660033301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6ccbf7c4c744d8ab5858bf4ece0896c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0e504f4-6e", "ovs_interfaceid": "d0e504f4-6e57-4ee2-9333-8b8fbff71253", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1329.427751] env[62507]: DEBUG oslo_concurrency.lockutils [req-15195eef-0865-412c-bbd6-c09dd9b65fad req-08cd25bf-f08a-4985-aa28-c06df09f7635 service nova] Releasing lock "refresh_cache-65efc608-6573-4690-8d11-2f0459647d70" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1329.635608] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460050, 'name': CreateVM_Task, 'duration_secs': 0.276636} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
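task-2460050 above resolves by polling: oslo.vmware re-reads the task state on a timer ("progress is 5%.") until it reaches a terminal state ("completed successfully." with duration_secs). A self-contained sketch of that loop, with get_task_info standing in for the real vSphere property read (illustrative, not the library's code):

    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        # Poll until the task reaches a terminal state, echoing progress
        # the way the _poll_task entries above do.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            print(f"Task {info['name']} progress is {info.get('progress', 0)}%.")
            time.sleep(interval)
        raise TimeoutError('task did not complete in time')

    # Simulated task: still running on the first poll, done on the second.
    states = iter([
        {'name': 'CreateVM_Task', 'state': 'running', 'progress': 5},
        {'name': 'CreateVM_Task', 'state': 'success'},
    ])
    wait_for_task(lambda: next(states), interval=0.01)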
[ 1329.635608] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1329.635993] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1329.636183] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1329.636493] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1329.636774] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e497741d-51a4-4c67-933a-0dad7dd2389e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.640782] env[62507]: DEBUG oslo_vmware.api [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Waiting for the task: (returnval){
[ 1329.640782] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]528d12fb-cf41-f00a-582c-ab63da1644fa"
[ 1329.640782] env[62507]: _type = "Task"
[ 1329.640782] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1329.647746] env[62507]: DEBUG oslo_vmware.api [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]528d12fb-cf41-f00a-582c-ab63da1644fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1330.151384] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1330.151666] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1330.152725] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1333.868966] env[62507]: DEBUG oslo_concurrency.lockutils [None req-deaec11c-8a9c-46ce-a158-e710f62bb53e tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Acquiring lock "65efc608-6573-4690-8d11-2f0459647d70" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1344.908177] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1345.184743] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1347.167437] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1349.167611] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1349.167871] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1350.168731] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
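The "waited"/"held" figures attached to every lock message in this log come from timing the acquire and the release; the image-cache lock above, for instance, is held across the SearchDatastore_Task round-trip. A minimal stand-in for that instrumentation (plain threading; oslo.concurrency adds fair queuing and external file locks on top):

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock, name):
        t0 = time.monotonic()
        lock.acquire()
        t_acquired = time.monotonic()
        print(f'Lock "{name}" acquired :: waited {t_acquired - t0:.3f}s')
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" :: held '
                  f'{time.monotonic() - t_acquired:.3f}s')

    compute_resources = threading.Lock()
    with timed_lock(compute_resources, 'compute_resources'):
        time.sleep(0.01)  # stand-in for the work done under the lock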
[ 1350.169073] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 1350.169073] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 1350.191349] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1350.192028] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1350.192028] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1350.192028] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1350.192168] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1350.192249] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1350.192345] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1350.192460] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1350.192574] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1350.192687] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1350.192803] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
[ 1350.193647] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1350.193647] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1350.193647] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 1351.167630] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1353.167732] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1353.205334] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1353.205586] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1353.205759] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1353.205914] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1353.207263] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8875361-cfa6-4ced-a825-d162ce54512f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1353.215755] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92bbfa1f-4e9a-49a0-bf18-8f7faa4b871d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1353.229830] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281823fe-70cf-4446-944e-8b01ed459c77 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1353.235588] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c94197-375f-43a0-b7b4-fbcb57119394 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1353.264394] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181151MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1353.264542] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1353.264728] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1353.336217] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01d865c8-ed85-45ec-aac6-bf923cd52dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1353.336288] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1353.336456] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1353.336591] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b53bed7e-5e76-4aa5-abe2-b05750497404 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1353.336712] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1fac8aa4-37a9-4f94-8050-b338cd2cd182 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1353.336829] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3627bbf7-507f-4345-b093-3b4f5bb45eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1353.336973] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d888bcb9-89ef-41aa-b637-e2a15efd0ce8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1353.337085] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1353.337202] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f4f51028-a313-4d17-bcf1-4decec2d3c3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1353.337315] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 65efc608-6573-4690-8d11-2f0459647d70 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1353.349290] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e04f6623-4547-4095-a575-67eae0bbd289 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1353.359886] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 0600b5ad-334a-41e3-add2-4f8458040774 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1353.370148] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b35972e0-4a7b-4b2e-940e-2d9f40b0e55f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1353.380162] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef76c5ad-176d-44c0-891a-66333b42d0ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1353.390193] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a06c8cb4-ac81-44bf-bf6b-fa6c75fcb369 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1353.400988] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1353.411387] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance df12d1b1-3c2a-47f8-b8df-d9993acf8d82 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1353.421702] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7d0d04-1c97-40e1-824c-25d04f87e468 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1353.431243] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 955e2c90-e317-4148-887d-e9a4eacdda2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1353.441744] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01043570-d72d-4a97-8c51-cfe30b25b82b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1353.451148] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef5633ea-273d-429f-9a02-326711b73bab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1353.451396] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1353.451687] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1353.679762] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e6e352-1de4-4377-836e-b6e2f8221e09 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1353.687205] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb669043-3a7c-46ec-952c-4be76d8675b4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1353.717073] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fd0727-cc0c-44e6-afc2-abbf76923c64 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1353.723953] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dde47b9-3995-41d8-b9da-39a335368fdf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1353.736432] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
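The "Final resource view" above is plain bookkeeping: ten allocations, each the m1.nano footprint listed in the entries before it (1 VCPU / 128 MB / 1 GB), plus the 512 MB the inventory reserves, which the tracker counts as used. Reproducing the reported numbers (a sketch under those assumptions, not resource-tracker code):

    # Ten active/claimed instances, all m1.nano-sized, per the listing above.
    allocations = [{'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}] * 10
    reserved_memory_mb = 512  # 'reserved' from the MEMORY_MB inventory

    used_ram = reserved_memory_mb + sum(a['MEMORY_MB'] for a in allocations)
    used_vcpus = sum(a['VCPU'] for a in allocations)
    used_disk = sum(a['DISK_GB'] for a in allocations)
    print(f'used_ram={used_ram}MB used_disk={used_disk}GB used_vcpus={used_vcpus}')
    # used_ram=1792MB used_disk=10GB used_vcpus=10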
[ 1353.744678] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1353.760418] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1353.760600] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.496s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1358.536824] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1358.537107] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1368.022131] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "637de77e-d142-45ca-8a4e-3bf365e31502" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1368.022487] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "637de77e-d142-45ca-8a4e-3bf365e31502" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1369.114548] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ae7776d7-4d7c-44d7-a415-526aee7ef126 tempest-ServersTestMultiNic-508332718 tempest-ServersTestMultiNic-508332718-project-member] Acquiring lock "e3b2a1ab-a686-4e28-85fd-9608f1cd6430" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1369.114548] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ae7776d7-4d7c-44d7-a415-526aee7ef126 tempest-ServersTestMultiNic-508332718 tempest-ServersTestMultiNic-508332718-project-member] Lock "e3b2a1ab-a686-4e28-85fd-9608f1cd6430" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1373.326435] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ef2bb14a-e7bc-407f-9c83-f81122f3de0c tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Acquiring lock "630d18e9-4769-4141-b0a8-7dd32d853be1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1373.326786] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ef2bb14a-e7bc-407f-9c83-f81122f3de0c tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Lock "630d18e9-4769-4141-b0a8-7dd32d853be1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1374.855124] env[62507]: WARNING oslo_vmware.rw_handles [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1374.855124] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1374.855124] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1374.855124] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1374.855124] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1374.855124] env[62507]: ERROR oslo_vmware.rw_handles response.begin()
[ 1374.855124] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1374.855124] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1374.855124] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1374.855124] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1374.855124] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1374.855124] env[62507]: ERROR oslo_vmware.rw_handles
[ 1374.855715] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/5f9663e8-96c5-4126-8917-3879a8008397/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1374.857677] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1374.857928] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Copying Virtual Disk [datastore2] vmware_temp/5f9663e8-96c5-4126-8917-3879a8008397/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/5f9663e8-96c5-4126-8917-3879a8008397/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1374.858542] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57ea1c40-9e76-414c-a81f-47de458576ee {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1374.867060] env[62507]: DEBUG oslo_vmware.api [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Waiting for the task: (returnval){
[ 1374.867060] env[62507]: value = "task-2460051"
[ 1374.867060] env[62507]: _type = "Task"
[ 1374.867060] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1374.875558] env[62507]: DEBUG oslo_vmware.api [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Task: {'id': task-2460051, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1375.378338] env[62507]: DEBUG oslo_vmware.exceptions [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1375.378931] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1375.379507] env[62507]: ERROR nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1375.379507] env[62507]: Faults: ['InvalidArgument']
[ 1375.379507] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Traceback (most recent call last):
[ 1375.379507] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1375.379507] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] yield resources
[ 1375.379507] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1375.379507] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] self.driver.spawn(context, instance, image_meta,
[ 1375.379507] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1375.379507] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1375.379507] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1375.379507] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] self._fetch_image_if_missing(context, vi)
[ 1375.379507] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1375.379823] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] image_cache(vi, tmp_image_ds_loc)
[ 1375.379823] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1375.379823] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] vm_util.copy_virtual_disk(
[ 1375.379823] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1375.379823] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] session._wait_for_task(vmdk_copy_task)
[ 1375.379823] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1375.379823] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] return self.wait_for_task(task_ref) [ 1375.379823] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1375.379823] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] return evt.wait() [ 1375.379823] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1375.379823] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] result = hub.switch() [ 1375.379823] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1375.379823] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] return self.greenlet.switch() [ 1375.380138] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1375.380138] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] self.f(*self.args, **self.kw) [ 1375.380138] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1375.380138] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] raise exceptions.translate_fault(task_info.error) [ 1375.380138] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1375.380138] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Faults: ['InvalidArgument'] [ 1375.380138] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] [ 1375.380138] env[62507]: INFO nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Terminating instance [ 1375.381056] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.381271] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1375.381912] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59f31179-28f4-43f1-946c-5cf63f601225 {{(pid=62507) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.383999] env[62507]: DEBUG nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1375.384206] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1375.385027] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd381f83-c0a3-46d4-9ecd-b0b78ef7143d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.392343] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1375.393338] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23ba2838-3050-436c-a99c-e29d565a2abf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.394695] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1375.394869] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1375.395521] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-113d04bd-f436-48db-89cf-0b4a40ebf649 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.400455] env[62507]: DEBUG oslo_vmware.api [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Waiting for the task: (returnval){ [ 1375.400455] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52652849-9ce1-8b22-f105-361975dc7af5" [ 1375.400455] env[62507]: _type = "Task" [ 1375.400455] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.407520] env[62507]: DEBUG oslo_vmware.api [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52652849-9ce1-8b22-f105-361975dc7af5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.464767] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1375.465017] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1375.465214] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Deleting the datastore file [datastore2] 01d865c8-ed85-45ec-aac6-bf923cd52dfa {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1375.465496] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e420c57c-087d-4480-b673-91f4659269b6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.471649] env[62507]: DEBUG oslo_vmware.api [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Waiting for the task: (returnval){ [ 1375.471649] env[62507]: value = "task-2460053" [ 1375.471649] env[62507]: _type = "Task" [ 1375.471649] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.480121] env[62507]: DEBUG oslo_vmware.api [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Task: {'id': task-2460053, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.910635] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1375.910957] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Creating directory with path [datastore2] vmware_temp/1a975703-a881-4523-903f-abdb0affcfc8/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1375.911171] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c9b8cb5-bb0f-452c-938d-bca793d57133 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.922980] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Created directory with path [datastore2] vmware_temp/1a975703-a881-4523-903f-abdb0affcfc8/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1375.923182] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Fetch image to [datastore2] vmware_temp/1a975703-a881-4523-903f-abdb0affcfc8/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1375.923360] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/1a975703-a881-4523-903f-abdb0affcfc8/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1375.924078] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4165ff05-9bb0-41f1-872a-8187ac2140ae {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.930312] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70f3329-ab1a-4b45-bea9-d074317c1681 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.939097] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be93a59-90cd-42e9-ae4e-0c8a912380c3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.968569] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904dd7c0-056a-4dd4-b298-ccc9ae17dc53 {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.977024] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2dbb9e77-3b2d-4194-bf19-e5f39f2de972 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.981282] env[62507]: DEBUG oslo_vmware.api [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Task: {'id': task-2460053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063288} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.981859] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1375.982112] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1375.982323] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1375.982506] env[62507]: INFO nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Took 0.60 seconds to destroy the instance on the hypervisor. 
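
The records above trace the sparse-image caching path: Nova invokes CopyVirtualDisk_Task, oslo.vmware polls the task ("progress is 0%"), the task's InvalidArgument fault surfaces as a VimFaultException, and the failed instance is then unregistered and its datastore files deleted. Below is a minimal sketch of that invoke-and-wait pattern using oslo.vmware's public session API; the vCenter host, credentials, and datastore paths are placeholders rather than values from this log, and Nova itself goes through its private vm_util/session helpers instead of calling this directly.

    from oslo_vmware import api, exceptions

    # Connects on construction; host and credentials here are illustrative only.
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    disk_mgr = session.vim.service_content.virtualDiskManager
    try:
        # Returns a Task managed object reference (the log's "task-2460051").
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
            destName='[datastore2] vmware_temp/example/example.vmdk')
        # wait_for_task polls TaskInfo (the "progress is 0%" records) and
        # raises if the task finishes in an error state.
        session.wait_for_task(task)
    except exceptions.VimFaultException as e:
        # fault_list carries the VIM fault names, e.g. ['InvalidArgument'].
        print(e.fault_list, e.msg)
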
[ 1375.984526] env[62507]: DEBUG nova.compute.claims [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1375.984701] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.984910] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.066891] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1376.121295] env[62507]: DEBUG oslo_vmware.rw_handles [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1a975703-a881-4523-903f-abdb0affcfc8/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1376.181340] env[62507]: DEBUG oslo_vmware.rw_handles [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1376.181532] env[62507]: DEBUG oslo_vmware.rw_handles [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1a975703-a881-4523-903f-abdb0affcfc8/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1376.328238] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322be8ff-0ec5-4f4c-ac56-ca61b15fa76e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.335477] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c1b56b-c908-4478-a16f-0fc0741f1184 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.364738] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a146a166-d602-4653-a680-796900e687af {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.371676] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03859192-8e50-4b8e-9e1e-bde5633118d1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.384474] env[62507]: DEBUG nova.compute.provider_tree [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1376.392898] env[62507]: DEBUG nova.scheduler.client.report [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1376.406479] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.421s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.407010] env[62507]: ERROR nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1376.407010] env[62507]: Faults: ['InvalidArgument'] [ 1376.407010] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Traceback (most recent call last): [ 1376.407010] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1376.407010] env[62507]: 
ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] self.driver.spawn(context, instance, image_meta, [ 1376.407010] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1376.407010] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1376.407010] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1376.407010] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] self._fetch_image_if_missing(context, vi) [ 1376.407010] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1376.407010] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] image_cache(vi, tmp_image_ds_loc) [ 1376.407010] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1376.407351] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] vm_util.copy_virtual_disk( [ 1376.407351] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1376.407351] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] session._wait_for_task(vmdk_copy_task) [ 1376.407351] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1376.407351] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] return self.wait_for_task(task_ref) [ 1376.407351] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1376.407351] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] return evt.wait() [ 1376.407351] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1376.407351] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] result = hub.switch() [ 1376.407351] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1376.407351] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] return self.greenlet.switch() [ 1376.407351] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1376.407351] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] self.f(*self.args, **self.kw) [ 1376.407821] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1376.407821] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] raise exceptions.translate_fault(task_info.error) [ 1376.407821] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1376.407821] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Faults: ['InvalidArgument'] [ 1376.407821] env[62507]: ERROR nova.compute.manager [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] [ 1376.407821] env[62507]: DEBUG nova.compute.utils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1376.409384] env[62507]: DEBUG nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Build of instance 01d865c8-ed85-45ec-aac6-bf923cd52dfa was re-scheduled: A specified parameter was not correct: fileType [ 1376.409384] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1376.409834] env[62507]: DEBUG nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1376.410017] env[62507]: DEBUG nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1376.410200] env[62507]: DEBUG nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1376.410364] env[62507]: DEBUG nova.network.neutron [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1376.753733] env[62507]: DEBUG nova.network.neutron [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.764498] env[62507]: INFO nova.compute.manager [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Took 0.35 seconds to deallocate network for instance. [ 1376.852658] env[62507]: INFO nova.scheduler.client.report [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Deleted allocations for instance 01d865c8-ed85-45ec-aac6-bf923cd52dfa [ 1376.871459] env[62507]: DEBUG oslo_concurrency.lockutils [None req-84f227f9-f0a6-4f61-930a-d3dc69debebc tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 599.469s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.872606] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4b7f9dc9-e837-4672-8bba-3e26b3effced tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 403.760s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.872842] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4b7f9dc9-e837-4672-8bba-3e26b3effced tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.873109] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4b7f9dc9-e837-4672-8bba-3e26b3effced tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" ::
waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.873222] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4b7f9dc9-e837-4672-8bba-3e26b3effced tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.875209] env[62507]: INFO nova.compute.manager [None req-4b7f9dc9-e837-4672-8bba-3e26b3effced tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Terminating instance [ 1376.878219] env[62507]: DEBUG nova.compute.manager [None req-4b7f9dc9-e837-4672-8bba-3e26b3effced tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1376.878424] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7f9dc9-e837-4672-8bba-3e26b3effced tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1376.878738] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1a05548-3726-4c3d-a9b7-9fe682e69d27 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.890045] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa59d7ae-a13c-4488-abb9-2020f1f01c44 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.901769] env[62507]: DEBUG nova.compute.manager [None req-53c2f539-908b-4a74-ab4b-a1d413892359 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: e04f6623-4547-4095-a575-67eae0bbd289] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1376.926918] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-4b7f9dc9-e837-4672-8bba-3e26b3effced tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 01d865c8-ed85-45ec-aac6-bf923cd52dfa could not be found. [ 1376.927170] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7f9dc9-e837-4672-8bba-3e26b3effced tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1376.927322] env[62507]: INFO nova.compute.manager [None req-4b7f9dc9-e837-4672-8bba-3e26b3effced tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Took 0.05 seconds to destroy the instance on the hypervisor.
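
Every Acquiring/acquired/released triplet in these records comes from oslo.concurrency: lockutils wraps a function such as _locked_do_build_and_run_instance or do_terminate_instance in a named lock, and the "waited"/"held" figures bracket the wrapped body. A small illustrative sketch of that pattern follows; the lock name and function below are examples, not Nova code.

    from oslo_concurrency import lockutils

    INSTANCE_UUID = '01d865c8-ed85-45ec-aac6-bf923cd52dfa'

    # Serializes all callers that synchronize on the same name within this
    # process; with external=True it would use a file lock shared across
    # processes instead.
    @lockutils.synchronized(INSTANCE_UUID)
    def do_terminate_instance():
        # Runs while the per-instance lock is held; the "held N.NNNs" figure
        # in the log measures exactly this span.
        pass

    do_terminate_instance()
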
[ 1376.927572] env[62507]: DEBUG oslo.service.loopingcall [None req-4b7f9dc9-e837-4672-8bba-3e26b3effced tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1376.927807] env[62507]: DEBUG nova.compute.manager [-] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1376.927905] env[62507]: DEBUG nova.network.neutron [-] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1376.930270] env[62507]: DEBUG nova.compute.manager [None req-53c2f539-908b-4a74-ab4b-a1d413892359 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: e04f6623-4547-4095-a575-67eae0bbd289] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1376.949449] env[62507]: DEBUG oslo_concurrency.lockutils [None req-53c2f539-908b-4a74-ab4b-a1d413892359 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "e04f6623-4547-4095-a575-67eae0bbd289" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 215.258s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.950815] env[62507]: DEBUG nova.network.neutron [-] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.957334] env[62507]: DEBUG nova.compute.manager [None req-f683e774-3ab9-4d87-b763-dc93afafb0bf tempest-ServersTestMultiNic-508332718 tempest-ServersTestMultiNic-508332718-project-member] [instance: 0600b5ad-334a-41e3-add2-4f8458040774] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1376.959945] env[62507]: INFO nova.compute.manager [-] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] Took 0.03 seconds to deallocate network for instance. [ 1376.980979] env[62507]: DEBUG nova.compute.manager [None req-f683e774-3ab9-4d87-b763-dc93afafb0bf tempest-ServersTestMultiNic-508332718 tempest-ServersTestMultiNic-508332718-project-member] [instance: 0600b5ad-334a-41e3-add2-4f8458040774] Instance disappeared before build.
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1377.002834] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f683e774-3ab9-4d87-b763-dc93afafb0bf tempest-ServersTestMultiNic-508332718 tempest-ServersTestMultiNic-508332718-project-member] Lock "0600b5ad-334a-41e3-add2-4f8458040774" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 207.096s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.014942] env[62507]: DEBUG nova.compute.manager [None req-8272ccb2-8152-4e44-8d4c-7f9f664557a9 tempest-ListImageFiltersTestJSON-995645754 tempest-ListImageFiltersTestJSON-995645754-project-member] [instance: b35972e0-4a7b-4b2e-940e-2d9f40b0e55f] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1377.046025] env[62507]: DEBUG nova.compute.manager [None req-8272ccb2-8152-4e44-8d4c-7f9f664557a9 tempest-ListImageFiltersTestJSON-995645754 tempest-ListImageFiltersTestJSON-995645754-project-member] [instance: b35972e0-4a7b-4b2e-940e-2d9f40b0e55f] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1377.064936] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4b7f9dc9-e837-4672-8bba-3e26b3effced tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.192s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.065762] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 265.646s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.065952] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01d865c8-ed85-45ec-aac6-bf923cd52dfa] During sync_power_state the instance has a pending task (deleting). Skip. [ 1377.066140] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "01d865c8-ed85-45ec-aac6-bf923cd52dfa" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.073157] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8272ccb2-8152-4e44-8d4c-7f9f664557a9 tempest-ListImageFiltersTestJSON-995645754 tempest-ListImageFiltersTestJSON-995645754-project-member] Lock "b35972e0-4a7b-4b2e-940e-2d9f40b0e55f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 206.431s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.082752] env[62507]: DEBUG nova.compute.manager [None req-d9f1f7e4-34e9-45f6-b2a1-309455c342b1 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: ef76c5ad-176d-44c0-891a-66333b42d0ce] Starting instance...
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1377.122678] env[62507]: DEBUG nova.compute.manager [None req-d9f1f7e4-34e9-45f6-b2a1-309455c342b1 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: ef76c5ad-176d-44c0-891a-66333b42d0ce] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1377.150437] env[62507]: DEBUG oslo_concurrency.lockutils [None req-d9f1f7e4-34e9-45f6-b2a1-309455c342b1 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "ef76c5ad-176d-44c0-891a-66333b42d0ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 206.218s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.163114] env[62507]: DEBUG nova.compute.manager [None req-aaaf678d-76c9-4d6a-9f84-d2c3f5bd1cdb tempest-ListImageFiltersTestJSON-995645754 tempest-ListImageFiltersTestJSON-995645754-project-member] [instance: a06c8cb4-ac81-44bf-bf6b-fa6c75fcb369] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1377.188608] env[62507]: DEBUG nova.compute.manager [None req-aaaf678d-76c9-4d6a-9f84-d2c3f5bd1cdb tempest-ListImageFiltersTestJSON-995645754 tempest-ListImageFiltersTestJSON-995645754-project-member] [instance: a06c8cb4-ac81-44bf-bf6b-fa6c75fcb369] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1377.209562] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aaaf678d-76c9-4d6a-9f84-d2c3f5bd1cdb tempest-ListImageFiltersTestJSON-995645754 tempest-ListImageFiltersTestJSON-995645754-project-member] Lock "a06c8cb4-ac81-44bf-bf6b-fa6c75fcb369" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 206.200s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.221150] env[62507]: DEBUG nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Starting instance...
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1377.273017] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.274847] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.274847] env[62507]: INFO nova.compute.claims [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1377.537038] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1b9009-aada-4cd6-ac4c-c86953e02845 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.544725] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25592c5b-687c-4642-b8c1-14f09e95526d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.573220] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d83fcbc-bd3e-4564-a45d-d7ae23157bb5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.579933] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669ba63c-0d0e-475a-b414-2c9ee69b1a10 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.592681] env[62507]: DEBUG nova.compute.provider_tree [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1377.603680] env[62507]: DEBUG nova.scheduler.client.report [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1377.617292] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 
tempest-ServersTestJSON-398374741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.344s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.617760] env[62507]: DEBUG nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1377.648087] env[62507]: DEBUG nova.compute.utils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1377.649529] env[62507]: DEBUG nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1377.649666] env[62507]: DEBUG nova.network.neutron [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1377.659939] env[62507]: DEBUG nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1377.702062] env[62507]: DEBUG nova.policy [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8187d3d405c244f995763c4d67515b6a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c850b58d9b554e81b09f26703a6f50f1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1377.719860] env[62507]: DEBUG nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1377.747190] env[62507]: DEBUG nova.virt.hardware [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=<?>,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-02-12T01:14:38Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1377.747439] env[62507]: DEBUG nova.virt.hardware [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1377.747599] env[62507]: DEBUG nova.virt.hardware [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1377.747783] env[62507]: DEBUG nova.virt.hardware [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1377.747929] env[62507]: DEBUG nova.virt.hardware [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1377.748091] env[62507]: DEBUG nova.virt.hardware [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1377.748300] env[62507]: DEBUG nova.virt.hardware [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1377.748480] env[62507]: DEBUG nova.virt.hardware [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1377.748655] env[62507]: DEBUG nova.virt.hardware [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741
tempest-ServersTestJSON-398374741-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1377.748826] env[62507]: DEBUG nova.virt.hardware [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1377.749008] env[62507]: DEBUG nova.virt.hardware [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1377.749878] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791b2032-f3c4-4b92-883e-1100c8641708 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.758031] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d298394-21ab-4a41-bf53-f7b353353db8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.009393] env[62507]: DEBUG nova.network.neutron [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Successfully created port: 649a6fd4-a273-4713-a79b-7e244ab0ca8a {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1378.834396] env[62507]: DEBUG nova.compute.manager [req-0094a1b0-6861-44ad-96ac-4cadb4ba336c req-a6f208b1-208f-4cb5-8144-88f91dc90eb9 service nova] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Received event network-vif-plugged-649a6fd4-a273-4713-a79b-7e244ab0ca8a {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1378.834621] env[62507]: DEBUG oslo_concurrency.lockutils [req-0094a1b0-6861-44ad-96ac-4cadb4ba336c req-a6f208b1-208f-4cb5-8144-88f91dc90eb9 service nova] Acquiring lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.834829] env[62507]: DEBUG oslo_concurrency.lockutils [req-0094a1b0-6861-44ad-96ac-4cadb4ba336c req-a6f208b1-208f-4cb5-8144-88f91dc90eb9 service nova] Lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.834997] env[62507]: DEBUG oslo_concurrency.lockutils [req-0094a1b0-6861-44ad-96ac-4cadb4ba336c req-a6f208b1-208f-4cb5-8144-88f91dc90eb9 service nova] Lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.835178] env[62507]: DEBUG nova.compute.manager [req-0094a1b0-6861-44ad-96ac-4cadb4ba336c req-a6f208b1-208f-4cb5-8144-88f91dc90eb9 service nova] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] No
waiting events found dispatching network-vif-plugged-649a6fd4-a273-4713-a79b-7e244ab0ca8a {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1378.835344] env[62507]: WARNING nova.compute.manager [req-0094a1b0-6861-44ad-96ac-4cadb4ba336c req-a6f208b1-208f-4cb5-8144-88f91dc90eb9 service nova] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Received unexpected event network-vif-plugged-649a6fd4-a273-4713-a79b-7e244ab0ca8a for instance with vm_state building and task_state spawning. [ 1378.913219] env[62507]: DEBUG nova.network.neutron [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Successfully updated port: 649a6fd4-a273-4713-a79b-7e244ab0ca8a {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1378.924525] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "refresh_cache-4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.924676] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired lock "refresh_cache-4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.924854] env[62507]: DEBUG nova.network.neutron [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1378.983040] env[62507]: DEBUG nova.network.neutron [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Instance cache missing network info. 
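
The WARNING just above ("Received unexpected event network-vif-plugged-... for instance with vm_state building") is a timing artifact rather than a failure: Neutron delivered the vif-plugged notification before the spawning thread registered a waiter for it, so pop_instance_event found no one to wake. A toy sketch of that register/pop pattern (illustrative names, not Nova's actual InstanceEvents implementation):

    import threading

    class InstanceEvents:
        """Toy version of the external-event register/pop pattern."""
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            # Called by the spawning thread *before* it blocks on the event.
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            # Called when the external event arrives from Neutron.
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                print("Received unexpected event %s" % event_name)  # no waiter yet
            else:
                ev.set()  # wake the spawning thread
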
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1379.171175] env[62507]: DEBUG nova.network.neutron [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Updating instance_info_cache with network_info: [{"id": "649a6fd4-a273-4713-a79b-7e244ab0ca8a", "address": "fa:16:3e:5c:95:32", "network": {"id": "a17cad1d-200f-41fe-b1b3-5a098d4c4317", "bridge": "br-int", "label": "tempest-ServersTestJSON-1356127193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c850b58d9b554e81b09f26703a6f50f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap649a6fd4-a2", "ovs_interfaceid": "649a6fd4-a273-4713-a79b-7e244ab0ca8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.181832] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Releasing lock "refresh_cache-4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.182113] env[62507]: DEBUG nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Instance network_info: |[{"id": "649a6fd4-a273-4713-a79b-7e244ab0ca8a", "address": "fa:16:3e:5c:95:32", "network": {"id": "a17cad1d-200f-41fe-b1b3-5a098d4c4317", "bridge": "br-int", "label": "tempest-ServersTestJSON-1356127193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c850b58d9b554e81b09f26703a6f50f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap649a6fd4-a2", "ovs_interfaceid": "649a6fd4-a273-4713-a79b-7e244ab0ca8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1379.182506] env[62507]: 
DEBUG nova.virt.vmwareapi.vmops [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:95:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '489b2441-7132-4942-8b61-49cf0ad4400e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '649a6fd4-a273-4713-a79b-7e244ab0ca8a', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1379.190170] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Creating folder: Project (c850b58d9b554e81b09f26703a6f50f1). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1379.190661] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ec4934d-bb23-4ac6-b55a-5e89a7e6313c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.200877] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Created folder: Project (c850b58d9b554e81b09f26703a6f50f1) in parent group-v497991. [ 1379.201083] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Creating folder: Instances. Parent ref: group-v498077. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1379.201298] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-40ad0c32-437d-4e11-9a5c-0a104f830237 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.208922] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Created folder: Instances in parent group-v498077. [ 1379.209152] env[62507]: DEBUG oslo.service.loopingcall [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1379.209330] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1379.209529] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cdfb63bd-243f-4a08-9b52-04acda9f7e4c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.226856] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1379.226856] env[62507]: value = "task-2460056" [ 1379.226856] env[62507]: _type = "Task" [ 1379.226856] env[62507]: } to complete. 
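
The four repeated-timestamp lines above are one multi-line log record: oslo.vmware prints the task handle it is about to wait on, then polls it until vCenter reports a terminal state (each "progress is N%" entry below is one poll iteration). A generic sketch of that poll-until-done pattern, with get_task_info() as an assumed accessor rather than oslo.vmware's real internals:

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, interval=0.5):
        """Poll a vSphere-style task until it leaves the running states."""
        while True:
            info = get_task_info()  # assumed: returns .state/.progress/.result/.error
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise TaskFailed(info.error)
            # 'queued' or 'running': report progress and poll again
            print("progress is %s%%" % (info.progress or 0))
            time.sleep(interval)
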
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.233730] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460056, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.736735] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460056, 'name': CreateVM_Task, 'duration_secs': 0.285131} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.736939] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1379.737593] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.737756] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.738083] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1379.738325] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14d08eb1-6352-461b-8615-62c9ec686cb2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.742845] env[62507]: DEBUG oslo_vmware.api [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for the task: (returnval){ [ 1379.742845] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52c2bbef-6d40-19ef-3896-6be6bc4e2016" [ 1379.742845] env[62507]: _type = "Task" [ 1379.742845] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.750293] env[62507]: DEBUG oslo_vmware.api [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52c2bbef-6d40-19ef-3896-6be6bc4e2016, 'name': SearchDatastore_Task} progress is 0%. 
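
The "Acquiring lock / Acquired lock / Acquired external semaphore" triple above is oslo.concurrency's standard trace for a named lock taken with a cross-process component; using the image-cache path as the lock name is what serializes concurrent consumers of the same cached image. Roughly how such a lock is taken (a sketch; the lock_path and function bodies are placeholders):

    from oslo_concurrency import lockutils

    # Context-manager form; external=True adds a file lock so other
    # processes on the host serialize too (lock name as in the log).
    with lockutils.lock(
            '[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1',
            external=True, lock_path='/tmp/nova-locks'):
        pass  # check the image cache, download only if missing

    # Decorator form, serializing all callers on one name (compare the
    # "compute_resources" lock appearing later in this log):
    @lockutils.synchronized('compute_resources')
    def _update_available_resource():
        pass
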
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.253228] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.253593] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1380.253689] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1380.885986] env[62507]: DEBUG nova.compute.manager [req-ff040282-e454-4ca5-9b2d-0fb1937a705f req-42044514-204d-4c5d-846a-405ca8cb68c7 service nova] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Received event network-changed-649a6fd4-a273-4713-a79b-7e244ab0ca8a {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1380.886177] env[62507]: DEBUG nova.compute.manager [req-ff040282-e454-4ca5-9b2d-0fb1937a705f req-42044514-204d-4c5d-846a-405ca8cb68c7 service nova] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Refreshing instance network info cache due to event network-changed-649a6fd4-a273-4713-a79b-7e244ab0ca8a. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1380.886389] env[62507]: DEBUG oslo_concurrency.lockutils [req-ff040282-e454-4ca5-9b2d-0fb1937a705f req-42044514-204d-4c5d-846a-405ca8cb68c7 service nova] Acquiring lock "refresh_cache-4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1380.886559] env[62507]: DEBUG oslo_concurrency.lockutils [req-ff040282-e454-4ca5-9b2d-0fb1937a705f req-42044514-204d-4c5d-846a-405ca8cb68c7 service nova] Acquired lock "refresh_cache-4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.886741] env[62507]: DEBUG nova.network.neutron [req-ff040282-e454-4ca5-9b2d-0fb1937a705f req-42044514-204d-4c5d-846a-405ca8cb68c7 service nova] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Refreshing network info cache for port 649a6fd4-a273-4713-a79b-7e244ab0ca8a {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1381.143299] env[62507]: DEBUG nova.network.neutron [req-ff040282-e454-4ca5-9b2d-0fb1937a705f req-42044514-204d-4c5d-846a-405ca8cb68c7 service nova] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Updated VIF entry in instance network info cache for port 649a6fd4-a273-4713-a79b-7e244ab0ca8a. 
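
The "Updating instance_info_cache with network_info: [...]" records in this log (one appears a few entries back, another follows just below) dump the whole VIF list as valid JSON, which makes them convenient hooks for log analysis. A small extraction helper of our own, not part of Nova:

    import json
    import re

    def vifs_from_log_line(line):
        """Extract (port_id, mac, [fixed ips]) from an
        'Updating instance_info_cache with network_info: [...]' log line."""
        m = re.search(r'network_info: (\[.*\])(?: \{\{|$)', line)
        if not m:
            return []
        out = []
        for vif in json.loads(m.group(1)):
            ips = [ip['address']
                   for subnet in vif['network']['subnets']
                   for ip in subnet['ips']]
            out.append((vif['id'], vif['address'], ips))
        return out

    # For the dump above this would yield:
    # [('649a6fd4-a273-4713-a79b-7e244ab0ca8a', 'fa:16:3e:5c:95:32',
    #   ['192.168.128.13'])]
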
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1381.143664] env[62507]: DEBUG nova.network.neutron [req-ff040282-e454-4ca5-9b2d-0fb1937a705f req-42044514-204d-4c5d-846a-405ca8cb68c7 service nova] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Updating instance_info_cache with network_info: [{"id": "649a6fd4-a273-4713-a79b-7e244ab0ca8a", "address": "fa:16:3e:5c:95:32", "network": {"id": "a17cad1d-200f-41fe-b1b3-5a098d4c4317", "bridge": "br-int", "label": "tempest-ServersTestJSON-1356127193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c850b58d9b554e81b09f26703a6f50f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap649a6fd4-a2", "ovs_interfaceid": "649a6fd4-a273-4713-a79b-7e244ab0ca8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.155674] env[62507]: DEBUG oslo_concurrency.lockutils [req-ff040282-e454-4ca5-9b2d-0fb1937a705f req-42044514-204d-4c5d-846a-405ca8cb68c7 service nova] Releasing lock "refresh_cache-4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.828542] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.828773] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.004241] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ac07952-dfdb-42cc-820c-5de0a5c74fbf tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.756409] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.167572] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.167861] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1410.167579] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1410.167869] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1410.167910] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1410.191756] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1410.191913] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1410.192057] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1410.192185] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1410.192318] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1410.192440] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Skipping network cache update for instance because it is Building. 
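
The run of "Skipping network cache update ... because it is Building." entries is the _heal_instance_info_cache periodic task walking its instance list: instances still building get their cache populated by the spawn path itself, so the healer only refreshes instances in a steady state. Condensed to a sketch (illustrative, not Nova's exact control flow):

    def heal_instance_info_cache(instances):
        to_heal = []
        for inst in instances:
            if inst.vm_state == 'building':
                print("Skipping network cache update for instance "
                      "because it is Building.")
                continue
            to_heal.append(inst)
        if not to_heal:
            print("Didn't find any instances for network info cache update.")
        for inst in to_heal:
            refresh_network_info_cache(inst)

    def refresh_network_info_cache(inst):
        pass  # placeholder for the Neutron round-trip and cache write
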
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1410.192685] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1410.192685] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1410.192784] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1410.192995] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1410.193161] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1410.193648] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1411.167382] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.168734] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.169017] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.169172] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
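
All of the "Running periodic task ComputeManager._*" lines come from oslo.service's periodic-task machinery: methods decorated on the manager class are collected and fired by run_periodic_tasks on a timer. A minimal example of that API (the spacings here are made up):

    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60)
        def _poll_unconfirmed_resizes(self, context):
            print("polling unconfirmed resizes")

        @periodic_task.periodic_task(spacing=300)
        def _reclaim_queued_deletes(self, context):
            # Nova short-circuits this one when
            # CONF.reclaim_instance_interval <= 0, as logged above.
            print("reclaim skipped")

    # Typically driven by a looping call, roughly:
    #   mgr = Manager(conf)
    #   mgr.run_periodic_tasks(context, raise_on_error=False)
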
{{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1413.168718] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.180671] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.180993] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.181088] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.181269] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1413.182423] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30e29cf-4940-466e-8a64-b5c19a0b722d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.191113] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3d51c6-af71-4e5c-897c-40fcc00cbe2e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.204441] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5a3d1c-e726-43aa-b9d1-3005b9ec24c1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.210751] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15053d6f-1086-4040-b18d-278e98c3f105 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.239846] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181178MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1413.239998] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1413.240207] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.412472] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.412639] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.412769] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b53bed7e-5e76-4aa5-abe2-b05750497404 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.412896] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1fac8aa4-37a9-4f94-8050-b338cd2cd182 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.413028] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3627bbf7-507f-4345-b093-3b4f5bb45eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.413153] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d888bcb9-89ef-41aa-b637-e2a15efd0ce8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.413272] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.413387] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f4f51028-a313-4d17-bcf1-4decec2d3c3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.413501] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 65efc608-6573-4690-8d11-2f0459647d70 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.413613] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.425620] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01043570-d72d-4a97-8c51-cfe30b25b82b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.435954] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef5633ea-273d-429f-9a02-326711b73bab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.445366] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7ff089f8-f304-4c2e-bf3d-16997fe8968c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.455211] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 637de77e-d142-45ca-8a4e-3bf365e31502 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.466452] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e3b2a1ab-a686-4e28-85fd-9608f1cd6430 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
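
The per-instance placement allocations above make the "Final resource view" record a few entries below easy to cross-check: ten actively managed instances, each holding {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}, plus the 512 MB the inventory reserves, reproduce used_ram=1792MB, used_disk=10GB and used_vcpus=10 (the scheduled-but-not-yet-started instances, seven in this cycle counting the ones listed below, are not counted as usage):

    allocations = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 10
    reserved_mb = 512  # MEMORY_MB 'reserved' from the inventory record below

    used_ram = sum(a['MEMORY_MB'] for a in allocations) + reserved_mb  # 1792
    used_disk = sum(a['DISK_GB'] for a in allocations)                 # 10
    used_vcpus = sum(a['VCPU'] for a in allocations)                   # 10
    assert (used_ram, used_disk, used_vcpus) == (1792, 10, 10)
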
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.479339] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 630d18e9-4769-4141-b0a8-7dd32d853be1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.490012] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.490253] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1413.490404] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1413.510113] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing inventories for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1413.525493] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Updating ProviderTree inventory for provider 40e67440-0925-46e5-9b58-6e63187cdfab from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1413.525688] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Updating inventory in ProviderTree for provider 40e67440-0925-46e5-9b58-6e63187cdfab with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1413.538384] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing 
aggregate associations for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab, aggregates: None {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1413.559511] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing trait associations for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1413.769851] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32545ab9-cc1a-42ea-b726-76fa9b494238 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.777635] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635e0c36-67c3-4952-97c9-f1a7dc546c61 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.807876] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acaf430a-13c0-4483-9fa9-d0837887ddae {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.815183] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf3369d-4331-434c-857b-471bf983b780 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.828378] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.837241] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1413.854142] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1413.854338] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.614s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.167326] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62507) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.167517] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Cleaning up deleted instances with incomplete migration {{(pid=62507) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1415.168515] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1422.178625] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1422.178908] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Cleaning up deleted instances {{(pid=62507) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1422.187929] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] There are 0 instances to clean {{(pid=62507) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1422.947084] env[62507]: WARNING oslo_vmware.rw_handles [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1422.947084] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1422.947084] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1422.947084] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1422.947084] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1422.947084] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1422.947084] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1422.947084] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1422.947084] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1422.947084] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1422.947084] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1422.947084] env[62507]: ERROR oslo_vmware.rw_handles [ 1422.947523] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/1a975703-a881-4523-903f-abdb0affcfc8/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1422.949898] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 
tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1422.950203] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Copying Virtual Disk [datastore2] vmware_temp/1a975703-a881-4523-903f-abdb0affcfc8/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/1a975703-a881-4523-903f-abdb0affcfc8/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1422.950503] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-565d0fde-5fbc-4459-840d-8aa1bd2521f6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.957945] env[62507]: DEBUG oslo_vmware.api [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Waiting for the task: (returnval){ [ 1422.957945] env[62507]: value = "task-2460057" [ 1422.957945] env[62507]: _type = "Task" [ 1422.957945] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.967963] env[62507]: DEBUG oslo_vmware.api [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Task: {'id': task-2460057, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.468678] env[62507]: DEBUG oslo_vmware.exceptions [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Fault InvalidArgument not matched. 
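
"Fault InvalidArgument not matched" means oslo.vmware looked for a specific exception class registered for that vSphere fault, found none, and fell back to the generic VimFaultException carried by the spawn traceback below ("A specified parameter was not correct: fileType"). A caller wanting to react to the specific fault can inspect fault_list on the exception; a sketch, with copy_disk() standing in for the failing CopyVirtualDisk_Task call:

    from oslo_vmware import exceptions as vexc

    def copy_disk():
        # Stand-in for vm_util.copy_virtual_disk() plus the task wait;
        # here we just raise the fault seen in this log.
        raise vexc.VimFaultException(
            ['InvalidArgument'],
            'A specified parameter was not correct: fileType')

    try:
        copy_disk()
    except vexc.VimFaultException as e:
        # fault_list carries the raw vSphere fault names.
        if 'InvalidArgument' in (e.fault_list or []):
            print("bad CopyVirtualDisk spec: %s" % e)
        else:
            raise
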
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1423.469035] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.469422] env[62507]: ERROR nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1423.469422] env[62507]: Faults: ['InvalidArgument'] [ 1423.469422] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Traceback (most recent call last): [ 1423.469422] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1423.469422] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] yield resources [ 1423.469422] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1423.469422] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] self.driver.spawn(context, instance, image_meta, [ 1423.469422] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1423.469422] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1423.469422] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1423.469422] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] self._fetch_image_if_missing(context, vi) [ 1423.469422] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1423.469763] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] image_cache(vi, tmp_image_ds_loc) [ 1423.469763] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1423.469763] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] vm_util.copy_virtual_disk( [ 1423.469763] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1423.469763] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] session._wait_for_task(vmdk_copy_task) [ 1423.469763] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1423.469763] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] return self.wait_for_task(task_ref) [ 1423.469763] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1423.469763] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] return evt.wait() [ 1423.469763] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1423.469763] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] result = hub.switch() [ 1423.469763] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1423.469763] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] return self.greenlet.switch() [ 1423.470110] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1423.470110] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] self.f(*self.args, **self.kw) [ 1423.470110] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1423.470110] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] raise exceptions.translate_fault(task_info.error) [ 1423.470110] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1423.470110] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Faults: ['InvalidArgument'] [ 1423.470110] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] [ 1423.470110] env[62507]: INFO nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Terminating instance [ 1423.471397] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.471627] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1423.471870] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c47a638b-0675-41b3-ac03-ad909446ec00 {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.474138] env[62507]: DEBUG nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1423.474336] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1423.475063] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98be51cc-a00c-4a45-9e41-42fde792a968 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.481857] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1423.482082] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c591a0a-e117-458c-aaf1-301576cae16f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.484220] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1423.484395] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1423.485323] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5cf41ff-5e59-4a5f-9e9a-aedf80ce9b10 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.489785] env[62507]: DEBUG oslo_vmware.api [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Waiting for the task: (returnval){ [ 1423.489785] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52be0b6e-0e2c-245a-33d2-2e403ed17688" [ 1423.489785] env[62507]: _type = "Task" [ 1423.489785] env[62507]: } to complete. 
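
The teardown interleaved above follows a fixed order that the log makes visible: unregister the VM first (UnregisterVM), then delete its datastore directory (the DeleteDatastoreFile_Task a few entries below). Condensed over an assumed session helper, not the real oslo.vmware session API:

    def destroy_instance(session, vm_ref, ds_path):
        # Order matters: the datastore directory cannot be removed while
        # the VM is still registered with vCenter.
        session.call('UnregisterVM', vm_ref)                      # hypothetical helper
        task = session.call('DeleteDatastoreFile_Task', ds_path)  # returns a task ref
        session.wait_for_task(task)                               # poll as sketched earlier

    # ds_path as in this log:
    #   '[datastore2] b866307e-f0e9-40d0-8603-fbfb9e2ee15a'
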
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.497507] env[62507]: DEBUG oslo_vmware.api [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52be0b6e-0e2c-245a-33d2-2e403ed17688, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.545432] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1423.545787] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1423.545989] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Deleting the datastore file [datastore2] b866307e-f0e9-40d0-8603-fbfb9e2ee15a {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1423.546295] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dabef07d-78ea-4ef3-8e12-23cea64d9971 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.552211] env[62507]: DEBUG oslo_vmware.api [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Waiting for the task: (returnval){ [ 1423.552211] env[62507]: value = "task-2460059" [ 1423.552211] env[62507]: _type = "Task" [ 1423.552211] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.560482] env[62507]: DEBUG oslo_vmware.api [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Task: {'id': task-2460059, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.000195] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1424.000406] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Creating directory with path [datastore2] vmware_temp/4d535844-aa49-4cdf-a36e-d04233bb950d/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1424.000628] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12daaa5e-c70e-45cb-8805-e134b416e3de {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.011571] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Created directory with path [datastore2] vmware_temp/4d535844-aa49-4cdf-a36e-d04233bb950d/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1424.011747] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Fetch image to [datastore2] vmware_temp/4d535844-aa49-4cdf-a36e-d04233bb950d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1424.011920] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/4d535844-aa49-4cdf-a36e-d04233bb950d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1424.012620] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310318b8-d4f7-4fd2-b194-29c0b31c9231 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.018654] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfcd441-836e-4222-a401-c6127208b688 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.027407] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b391a9-32a8-4c34-9f45-c82e45beb616 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.059490] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-192db451-006c-4a84-a6ca-ef5f1dd898e6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.066100] env[62507]: DEBUG oslo_vmware.api [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Task: {'id': task-2460059, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081538} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.067448] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1424.067637] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1424.067807] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1424.067980] env[62507]: INFO nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 1424.069735] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ae17627e-ea86-4f08-948d-7ae5b1dcc332 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.071534] env[62507]: DEBUG nova.compute.claims [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1424.071715] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.071924] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.095618] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1424.145939] env[62507]: DEBUG oslo_vmware.rw_handles [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4d535844-aa49-4cdf-a36e-d04233bb950d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1424.207876] env[62507]: DEBUG oslo_vmware.rw_handles [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1424.208124] env[62507]: DEBUG oslo_vmware.rw_handles [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4d535844-aa49-4cdf-a36e-d04233bb950d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1424.367777] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40621887-cf58-4dbf-9012-0ad7490a9d46 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.375663] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ecb64b-0e53-4720-86eb-08342db75135 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.405328] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b740f52-1c54-4cf2-9010-1cfb51ff4298 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.412105] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a2c1f3-6e5a-485a-8f05-5f0c78f43110 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.425120] env[62507]: DEBUG nova.compute.provider_tree [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1424.456736] env[62507]: DEBUG nova.scheduler.client.report [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1424.470444] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.398s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.470969] env[62507]: ERROR nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1424.470969] env[62507]: Faults: ['InvalidArgument'] [ 1424.470969] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Traceback (most recent call last): [ 1424.470969] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1424.470969] env[62507]: ERROR nova.compute.manager 
[instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] self.driver.spawn(context, instance, image_meta, [ 1424.470969] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1424.470969] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1424.470969] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1424.470969] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] self._fetch_image_if_missing(context, vi) [ 1424.470969] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1424.470969] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] image_cache(vi, tmp_image_ds_loc) [ 1424.470969] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1424.471326] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] vm_util.copy_virtual_disk( [ 1424.471326] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1424.471326] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] session._wait_for_task(vmdk_copy_task) [ 1424.471326] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1424.471326] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] return self.wait_for_task(task_ref) [ 1424.471326] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1424.471326] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] return evt.wait() [ 1424.471326] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1424.471326] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] result = hub.switch() [ 1424.471326] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1424.471326] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] return self.greenlet.switch() [ 1424.471326] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1424.471326] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] self.f(*self.args, **self.kw) [ 1424.471677] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1424.471677] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] raise exceptions.translate_fault(task_info.error) [ 1424.471677] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1424.471677] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Faults: ['InvalidArgument'] [ 1424.471677] env[62507]: ERROR nova.compute.manager [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] [ 1424.471820] env[62507]: DEBUG nova.compute.utils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1424.473630] env[62507]: DEBUG nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Build of instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a was re-scheduled: A specified parameter was not correct: fileType [ 1424.473630] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1424.474047] env[62507]: DEBUG nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1424.474234] env[62507]: DEBUG nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1424.474409] env[62507]: DEBUG nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1424.474595] env[62507]: DEBUG nova.network.neutron [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1424.929060] env[62507]: DEBUG nova.network.neutron [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.939756] env[62507]: INFO nova.compute.manager [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Took 0.47 seconds to deallocate network for instance. [ 1425.030841] env[62507]: INFO nova.scheduler.client.report [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Deleted allocations for instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a [ 1425.053839] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15412f81-0ef7-45b6-9759-7e25ea2a5603 tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 629.458s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.055284] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee0a0459-3973-4f2b-8d10-b51379d3996f tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 433.105s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.056020] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee0a0459-3973-4f2b-8d10-b51379d3996f tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Acquiring lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.056020] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee0a0459-3973-4f2b-8d10-b51379d3996f tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.056020] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee0a0459-3973-4f2b-8d10-b51379d3996f tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.057807] env[62507]: INFO nova.compute.manager [None req-ee0a0459-3973-4f2b-8d10-b51379d3996f tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Terminating instance [ 1425.059521] env[62507]: DEBUG nova.compute.manager [None req-ee0a0459-3973-4f2b-8d10-b51379d3996f tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1425.059714] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee0a0459-3973-4f2b-8d10-b51379d3996f tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1425.060417] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cd9553c1-e945-47ac-89d8-1961672e969a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.064583] env[62507]: DEBUG nova.compute.manager [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] [instance: df12d1b1-3c2a-47f8-b8df-d9993acf8d82] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1425.071326] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d55d7f8-4f26-4313-9428-f695cc057051 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.089220] env[62507]: DEBUG nova.compute.manager [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] [instance: df12d1b1-3c2a-47f8-b8df-d9993acf8d82] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1425.101292] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-ee0a0459-3973-4f2b-8d10-b51379d3996f tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b866307e-f0e9-40d0-8603-fbfb9e2ee15a could not be found. 
[ 1425.101575] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee0a0459-3973-4f2b-8d10-b51379d3996f tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1425.101704] env[62507]: INFO nova.compute.manager [None req-ee0a0459-3973-4f2b-8d10-b51379d3996f tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1425.101948] env[62507]: DEBUG oslo.service.loopingcall [None req-ee0a0459-3973-4f2b-8d10-b51379d3996f tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1425.104083] env[62507]: DEBUG nova.compute.manager [-] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1425.104162] env[62507]: DEBUG nova.network.neutron [-] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1425.115426] env[62507]: DEBUG oslo_concurrency.lockutils [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] Lock "df12d1b1-3c2a-47f8-b8df-d9993acf8d82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.839s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.126205] env[62507]: DEBUG nova.compute.manager [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] [instance: fb7d0d04-1c97-40e1-824c-25d04f87e468] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1425.131633] env[62507]: DEBUG nova.network.neutron [-] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.139957] env[62507]: INFO nova.compute.manager [-] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] Took 0.04 seconds to deallocate network for instance. [ 1425.154785] env[62507]: DEBUG nova.compute.manager [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] [instance: fb7d0d04-1c97-40e1-824c-25d04f87e468] Instance disappeared before build. 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1425.182344] env[62507]: DEBUG oslo_concurrency.lockutils [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] Lock "fb7d0d04-1c97-40e1-824c-25d04f87e468" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.875s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.192819] env[62507]: DEBUG nova.compute.manager [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] [instance: 955e2c90-e317-4148-887d-e9a4eacdda2a] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1425.215858] env[62507]: DEBUG nova.compute.manager [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] [instance: 955e2c90-e317-4148-887d-e9a4eacdda2a] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1425.233324] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee0a0459-3973-4f2b-8d10-b51379d3996f tempest-ServerGroupTestJSON-185694910 tempest-ServerGroupTestJSON-185694910-project-member] Lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.178s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.234146] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 313.814s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.234512] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b866307e-f0e9-40d0-8603-fbfb9e2ee15a] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1425.234512] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "b866307e-f0e9-40d0-8603-fbfb9e2ee15a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.236611] env[62507]: DEBUG oslo_concurrency.lockutils [None req-61535086-6fb5-4446-a00c-7a8efcdd79ed tempest-ListServersNegativeTestJSON-380677659 tempest-ListServersNegativeTestJSON-380677659-project-member] Lock "955e2c90-e317-4148-887d-e9a4eacdda2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.893s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.245814] env[62507]: DEBUG nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1425.294722] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.294816] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.296442] env[62507]: INFO nova.compute.claims [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1425.515606] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b5d7a9-d0ad-4011-8c4e-e29cbbd6cec9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.523145] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d942ee5-50d2-42f9-a2b6-e018c71af9aa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.552710] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc2398a-1063-41e5-9f02-27b719c380ab {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.559361] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac61ffa8-72a5-4e27-b6ac-54b37cd6733d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.572183] env[62507]: DEBUG nova.compute.provider_tree 
[None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1425.580848] env[62507]: DEBUG nova.scheduler.client.report [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1425.596888] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.302s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.597467] env[62507]: DEBUG nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1425.628772] env[62507]: DEBUG nova.compute.utils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1425.630298] env[62507]: DEBUG nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1425.630478] env[62507]: DEBUG nova.network.neutron [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1425.639765] env[62507]: DEBUG nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Start building block device mappings for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1425.701716] env[62507]: DEBUG nova.policy [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41d4a4978bef499eba41ef52b77b3baf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd8854a0405ae4d12a3cd11a9bb806015', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1425.704856] env[62507]: DEBUG nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1425.730634] env[62507]: DEBUG nova.virt.hardware [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1425.730876] env[62507]: DEBUG nova.virt.hardware [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1425.731047] env[62507]: DEBUG nova.virt.hardware [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1425.731240] env[62507]: DEBUG nova.virt.hardware [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1425.731387] env[62507]: DEBUG nova.virt.hardware [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1425.731536] env[62507]: DEBUG nova.virt.hardware [None 
req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1425.731746] env[62507]: DEBUG nova.virt.hardware [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1425.731980] env[62507]: DEBUG nova.virt.hardware [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1425.732187] env[62507]: DEBUG nova.virt.hardware [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1425.732359] env[62507]: DEBUG nova.virt.hardware [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1425.732538] env[62507]: DEBUG nova.virt.hardware [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1425.733471] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e73525-f84b-4ec3-a514-ceb140826e15 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.742480] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3b12aa-64c7-46f7-a550-b926efe4ee96 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.255077] env[62507]: DEBUG nova.network.neutron [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Successfully created port: 35a2756c-5227-4463-b087-7de6c5c657aa {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1426.920871] env[62507]: DEBUG nova.network.neutron [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Successfully updated port: 35a2756c-5227-4463-b087-7de6c5c657aa {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1426.934294] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 
tempest-ServerActionsTestJSON-1988473056-project-member] Acquiring lock "refresh_cache-01043570-d72d-4a97-8c51-cfe30b25b82b" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.934450] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Acquired lock "refresh_cache-01043570-d72d-4a97-8c51-cfe30b25b82b" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.934601] env[62507]: DEBUG nova.network.neutron [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1426.967492] env[62507]: DEBUG nova.compute.manager [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Received event network-vif-plugged-35a2756c-5227-4463-b087-7de6c5c657aa {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1426.967790] env[62507]: DEBUG oslo_concurrency.lockutils [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] Acquiring lock "01043570-d72d-4a97-8c51-cfe30b25b82b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.968059] env[62507]: DEBUG oslo_concurrency.lockutils [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] Lock "01043570-d72d-4a97-8c51-cfe30b25b82b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.968294] env[62507]: DEBUG oslo_concurrency.lockutils [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] Lock "01043570-d72d-4a97-8c51-cfe30b25b82b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.970466] env[62507]: DEBUG nova.compute.manager [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] No waiting events found dispatching network-vif-plugged-35a2756c-5227-4463-b087-7de6c5c657aa {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1426.970466] env[62507]: WARNING nova.compute.manager [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Received unexpected event network-vif-plugged-35a2756c-5227-4463-b087-7de6c5c657aa for instance with vm_state building and task_state spawning. 
[ 1426.970466] env[62507]: DEBUG nova.compute.manager [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Received event network-changed-35a2756c-5227-4463-b087-7de6c5c657aa {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1426.970466] env[62507]: DEBUG nova.compute.manager [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Refreshing instance network info cache due to event network-changed-35a2756c-5227-4463-b087-7de6c5c657aa. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1426.970466] env[62507]: DEBUG oslo_concurrency.lockutils [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] Acquiring lock "refresh_cache-01043570-d72d-4a97-8c51-cfe30b25b82b" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.995169] env[62507]: DEBUG nova.network.neutron [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1427.246735] env[62507]: DEBUG nova.network.neutron [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Updating instance_info_cache with network_info: [{"id": "35a2756c-5227-4463-b087-7de6c5c657aa", "address": "fa:16:3e:0f:d7:2f", "network": {"id": "2969e378-85ee-4ff9-bdf9-9201c31310ce", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2046717049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8854a0405ae4d12a3cd11a9bb806015", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4f91f31-0516-4d62-a341-e03a50b7c477", "external-id": "nsx-vlan-transportzone-963", "segmentation_id": 963, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35a2756c-52", "ovs_interfaceid": "35a2756c-5227-4463-b087-7de6c5c657aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.261704] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Releasing lock "refresh_cache-01043570-d72d-4a97-8c51-cfe30b25b82b" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.262072] env[62507]: DEBUG nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 
tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Instance network_info: |[{"id": "35a2756c-5227-4463-b087-7de6c5c657aa", "address": "fa:16:3e:0f:d7:2f", "network": {"id": "2969e378-85ee-4ff9-bdf9-9201c31310ce", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2046717049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8854a0405ae4d12a3cd11a9bb806015", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4f91f31-0516-4d62-a341-e03a50b7c477", "external-id": "nsx-vlan-transportzone-963", "segmentation_id": 963, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35a2756c-52", "ovs_interfaceid": "35a2756c-5227-4463-b087-7de6c5c657aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1427.262413] env[62507]: DEBUG oslo_concurrency.lockutils [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] Acquired lock "refresh_cache-01043570-d72d-4a97-8c51-cfe30b25b82b" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1427.262616] env[62507]: DEBUG nova.network.neutron [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Refreshing network info cache for port 35a2756c-5227-4463-b087-7de6c5c657aa {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1427.263673] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:d7:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4f91f31-0516-4d62-a341-e03a50b7c477', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35a2756c-5227-4463-b087-7de6c5c657aa', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1427.271719] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Creating folder: Project (d8854a0405ae4d12a3cd11a9bb806015). Parent ref: group-v497991. 
{{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1427.274690] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-948351dc-8c72-4e42-a229-3e3ae5f59b12 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.286154] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Created folder: Project (d8854a0405ae4d12a3cd11a9bb806015) in parent group-v497991. [ 1427.286349] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Creating folder: Instances. Parent ref: group-v498080. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1427.286884] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbf14438-e093-48da-8f47-6535ba0b121a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.296537] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Created folder: Instances in parent group-v498080. [ 1427.296843] env[62507]: DEBUG oslo.service.loopingcall [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1427.297091] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1427.297204] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1618cb09-ae47-4b75-b77b-953ef05a9883 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.319012] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1427.319012] env[62507]: value = "task-2460062" [ 1427.319012] env[62507]: _type = "Task" [ 1427.319012] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.328159] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460062, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.551846] env[62507]: DEBUG nova.network.neutron [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Updated VIF entry in instance network info cache for port 35a2756c-5227-4463-b087-7de6c5c657aa. 
[ 1427.552221] env[62507]: DEBUG nova.network.neutron [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Updating instance_info_cache with network_info: [{"id": "35a2756c-5227-4463-b087-7de6c5c657aa", "address": "fa:16:3e:0f:d7:2f", "network": {"id": "2969e378-85ee-4ff9-bdf9-9201c31310ce", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2046717049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8854a0405ae4d12a3cd11a9bb806015", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4f91f31-0516-4d62-a341-e03a50b7c477", "external-id": "nsx-vlan-transportzone-963", "segmentation_id": 963, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35a2756c-52", "ovs_interfaceid": "35a2756c-5227-4463-b087-7de6c5c657aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1427.562513] env[62507]: DEBUG oslo_concurrency.lockutils [req-feb646b7-5036-41bb-a5fd-dba565a574c5 req-1c20bedc-6ed7-48bc-867e-382150b35005 service nova] Releasing lock "refresh_cache-01043570-d72d-4a97-8c51-cfe30b25b82b" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1427.829240] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460062, 'name': CreateVM_Task, 'duration_secs': 0.302998} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1427.829409] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1427.830123] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1427.830293] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1427.830607] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1427.830853] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6a8b7b4-89f6-4e1d-a5fc-693975da0dd2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1427.834957] env[62507]: DEBUG oslo_vmware.api [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Waiting for the task: (returnval){
[ 1427.834957] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52771aef-c723-7e1f-2fdc-86a0b8649995"
[ 1427.834957] env[62507]: _type = "Task"
[ 1427.834957] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1427.841994] env[62507]: DEBUG oslo_vmware.api [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52771aef-c723-7e1f-2fdc-86a0b8649995, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1428.345068] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1428.345447] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1428.345569] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1440.042154] env[62507]: DEBUG oslo_concurrency.lockutils [None req-128eb8c9-d425-4a01-b88e-b30fefc507d8 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Acquiring lock "01043570-d72d-4a97-8c51-cfe30b25b82b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1467.173066] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1469.162618] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1470.167457] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1470.167764] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 1470.167764] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 1470.187865] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1470.188177] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1470.188395] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1470.188564] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1470.188722] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1470.188876] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1470.189083] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1470.189266] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1470.189402] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1470.189528] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1470.189650] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
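The ComputeManager._heal_instance_info_cache and _poll_* lines above are oslo.service periodic tasks, all fired from one timer loop in the service. A minimal sketch of that mechanism (the class and method names below are illustrative, not Nova's):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Tasks(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60)
        def _heal_cache(self, context):
            # Invoked at most once per `spacing` seconds each time the
            # service calls run_periodic_tasks(), producing "Running
            # periodic task ..." debug lines like those above.
            pass

    tasks = Tasks(cfg.CONF)
    tasks.run_periodic_tasks(context=None)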
[ 1470.190177] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1470.753629] env[62507]: WARNING oslo_vmware.rw_handles [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1470.753629] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1470.753629] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1470.753629] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1470.753629] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1470.753629] env[62507]: ERROR oslo_vmware.rw_handles response.begin()
[ 1470.753629] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1470.753629] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1470.753629] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1470.753629] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1470.753629] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1470.753629] env[62507]: ERROR oslo_vmware.rw_handles
[ 1470.754052] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/4d535844-aa49-4cdf-a36e-d04233bb950d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1470.755989] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1470.756249] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Copying Virtual Disk [datastore2] vmware_temp/4d535844-aa49-4cdf-a36e-d04233bb950d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/4d535844-aa49-4cdf-a36e-d04233bb950d/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1470.756535] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-192b8326-8fd1-4878-87ef-2951709d02c7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1470.764665] env[62507]: DEBUG oslo_vmware.api [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Waiting for the task: (returnval){
[ 1470.764665] env[62507]: value = "task-2460063"
[ 1470.764665] env[62507]: _type = "Task"
[ 1470.764665] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1470.772657] env[62507]: DEBUG oslo_vmware.api [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Task: {'id': task-2460063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1471.167758] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1471.275158] env[62507]: DEBUG oslo_vmware.exceptions [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1471.275395] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1471.275999] env[62507]: ERROR nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1471.275999] env[62507]: Faults: ['InvalidArgument']
[ 1471.275999] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Traceback (most recent call last):
[ 1471.275999] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1471.275999] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] yield resources
[ 1471.275999] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1471.275999] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] self.driver.spawn(context, instance, image_meta,
[ 1471.275999] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1471.275999] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1471.275999] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1471.275999] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] self._fetch_image_if_missing(context, vi)
[ 1471.275999] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1471.276386] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] image_cache(vi, tmp_image_ds_loc)
[ 1471.276386] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1471.276386] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] vm_util.copy_virtual_disk(
[ 1471.276386] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1471.276386] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] session._wait_for_task(vmdk_copy_task)
[ 1471.276386] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1471.276386] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] return self.wait_for_task(task_ref)
[ 1471.276386] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1471.276386] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] return evt.wait()
[ 1471.276386] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1471.276386] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] result = hub.switch()
[ 1471.276386] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1471.276386] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] return self.greenlet.switch()
[ 1471.276756] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1471.276756] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] self.f(*self.args, **self.kw)
[ 1471.276756] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1471.276756] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] raise exceptions.translate_fault(task_info.error)
[ 1471.276756] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1471.276756] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Faults: ['InvalidArgument']
[ 1471.276756] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef]
[ 1471.276756] env[62507]: INFO nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Terminating instance
[ 1471.277931] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1471.278154] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1471.278390] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d59b9ec-25b3-42bd-a83f-3ceaa03c94cc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1471.280538] env[62507]: DEBUG nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
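The InvalidArgument traceback above comes out of the image-cache path: the downloaded sparse VMDK is copied into the cache with CopyVirtualDisk_Task, and the fault stored on the task is re-raised by the wait. A sketch of that call, reusing the oslo.vmware session pattern from earlier (session, dc_ref, and both datastore paths are placeholders, not values from this log):

    # virtualDiskManager is the vim object that owns disk-copy tasks.
    disk_mgr = session.vim.service_content.virtualDiskManager
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore2] vmware_temp/TMP/tmp-sparse.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore2] devstack-image-cache_base/IMG/IMG.vmdk')
    # Re-raises the task's fault, e.g. the InvalidArgument ("fileType")
    # VimFaultException seen in the traceback above.
    session.wait_for_task(copy_task)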
[ 1471.280731] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1471.281469] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ae40f9-b003-4142-b24a-dde36b2d2759 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1471.288056] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1471.288262] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eca6780e-fea7-4fc8-bc98-66a879ff0fea {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1471.290358] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1471.290523] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1471.291461] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4581b41-445f-495a-a886-a9ac0d693b88 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1471.295927] env[62507]: DEBUG oslo_vmware.api [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for the task: (returnval){
[ 1471.295927] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529bf7e0-00dd-09bb-f383-c63789cbb0e8"
[ 1471.295927] env[62507]: _type = "Task"
[ 1471.295927] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1471.302727] env[62507]: DEBUG oslo_vmware.api [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529bf7e0-00dd-09bb-f383-c63789cbb0e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1471.350274] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1471.350482] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1471.350665] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Deleting the datastore file [datastore2] 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1471.350922] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4dbba47b-d79d-4802-96fb-2fc18d4fafe8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1471.356604] env[62507]: DEBUG oslo_vmware.api [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Waiting for the task: (returnval){
[ 1471.356604] env[62507]: value = "task-2460065"
[ 1471.356604] env[62507]: _type = "Task"
[ 1471.356604] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1471.364218] env[62507]: DEBUG oslo_vmware.api [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Task: {'id': task-2460065, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
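Instance teardown in this driver is the two-step sequence logged above: UnregisterVM removes the VM from vCenter's inventory, then DeleteDatastoreFile_Task removes its directory from the datastore. A sketch under the same session assumption (vm_ref, dc_ref, and the datastore path are placeholders):

    # Remove the VM from the inventory; its files stay on the datastore.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Then delete the instance directory from the datastore.
    file_mgr = session.vim.service_content.fileManager
    delete_task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_mgr,
        name='[datastore2] INSTANCE-UUID', datacenter=dc_ref)
    session.wait_for_task(delete_task)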
[ 1471.806157] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1471.807031] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Creating directory with path [datastore2] vmware_temp/190ebd21-da43-494e-82f2-bd5d59054526/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1471.807031] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bcc3aef8-c6dd-4a10-a63e-7b50c539a0aa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1471.817943] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Created directory with path [datastore2] vmware_temp/190ebd21-da43-494e-82f2-bd5d59054526/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1471.818160] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Fetch image to [datastore2] vmware_temp/190ebd21-da43-494e-82f2-bd5d59054526/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1471.818339] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/190ebd21-da43-494e-82f2-bd5d59054526/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1471.819125] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7fab57-6452-49d8-9fde-19805b53eef5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1471.825708] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18d61c3-414d-41d0-81f0-0c60a8ec745f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1471.834747] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a42b8f-6239-4fbf-81ab-181130bafe25 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1471.868354] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fffe722-f270-4878-b890-38bc7843b138 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1471.875227] env[62507]: DEBUG oslo_vmware.api [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Task: {'id': task-2460065, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078356} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1471.876704] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1471.876899] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1471.877091] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1471.877276] env[62507]: INFO nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Took 0.60 seconds to destroy the instance on the hypervisor.
[ 1471.879068] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-65609a0d-63d5-4d3d-8355-c8e765da4ae7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1471.880980] env[62507]: DEBUG nova.compute.claims [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1471.882029] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1471.882029] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1471.903858] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1471.955235] env[62507]: DEBUG oslo_vmware.rw_handles [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/190ebd21-da43-494e-82f2-bd5d59054526/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1472.017902] env[62507]: DEBUG oslo_vmware.rw_handles [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1472.018103] env[62507]: DEBUG oslo_vmware.rw_handles [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/190ebd21-da43-494e-82f2-bd5d59054526/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
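The rw_handles lines above show how image bytes reach the datastore: a plain HTTP PUT against the host's /folder endpoint, addressed by dcPath and dsName query parameters and authorized by a vCenter ticket (the AcquireGenericServiceTicket call above). A rough sketch with requests, assuming a placeholder URL, ticket value, and local file; the exact cookie handling inside oslo.vmware's write handle may differ:

    import requests

    url = ('https://esx.example.test:443/folder/vmware_temp/TMP/'
           'tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2')
    with open('tmp-sparse.vmdk', 'rb') as src:
        # Stream the file body; the session cookie stands in for the
        # service ticket that authorizes datastore file access.
        resp = requests.put(url, data=src,
                            cookies={'vmware_soap_session': 'TICKET'},
                            verify=False)
    resp.raise_for_status()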
[ 1472.151144] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055e7b4c-46a0-4873-8f07-8ff581ca27be {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1472.158415] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9054779-9b56-42a6-9d79-80fcdb5f5543 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1472.186304] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1472.187238] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77dc096b-3cfc-4942-9e85-e12a1df7b544 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1472.193780] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f45fa265-ad78-4d83-ba16-75b4daaaafe6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1472.206122] env[62507]: DEBUG nova.compute.provider_tree [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1472.216341] env[62507]: DEBUG nova.scheduler.client.report [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1472.230700] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.349s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1472.231246] env[62507]: ERROR nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1472.231246] env[62507]: Faults: ['InvalidArgument']
[ 1472.231246] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Traceback (most recent call last):
[ 1472.231246] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1472.231246] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] self.driver.spawn(context, instance, image_meta,
[ 1472.231246] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1472.231246] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1472.231246] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1472.231246] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] self._fetch_image_if_missing(context, vi)
[ 1472.231246] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1472.231246] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] image_cache(vi, tmp_image_ds_loc)
[ 1472.231246] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1472.231614] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] vm_util.copy_virtual_disk(
[ 1472.231614] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1472.231614] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] session._wait_for_task(vmdk_copy_task)
[ 1472.231614] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1472.231614] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] return self.wait_for_task(task_ref)
[ 1472.231614] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1472.231614] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] return evt.wait()
[ 1472.231614] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1472.231614] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] result = hub.switch()
[ 1472.231614] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1472.231614] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] return self.greenlet.switch()
[ 1472.231614] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1472.231614] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] self.f(*self.args, **self.kw)
[ 1472.231939] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1472.231939] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] raise exceptions.translate_fault(task_info.error)
[ 1472.231939] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1472.231939] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Faults: ['InvalidArgument']
[ 1472.231939] env[62507]: ERROR nova.compute.manager [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef]
[ 1472.231939] env[62507]: DEBUG nova.compute.utils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1472.233705] env[62507]: DEBUG nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Build of instance 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef was re-scheduled: A specified parameter was not correct: fileType
[ 1472.233705] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1472.234082] env[62507]: DEBUG nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1472.234296] env[62507]: DEBUG nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1472.234602] env[62507]: DEBUG nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1472.234852] env[62507]: DEBUG nova.network.neutron [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1472.647390] env[62507]: DEBUG nova.network.neutron [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1472.665877] env[62507]: INFO nova.compute.manager [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Took 0.43 seconds to deallocate network for instance.
[ 1472.772024] env[62507]: INFO nova.scheduler.client.report [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Deleted allocations for instance 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef
[ 1472.794447] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2820d484-1cc8-4c51-af9e-0cb90bcab82b tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 628.337s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1472.795715] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 432.290s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1472.795981] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquiring lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1472.796281] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1472.796503] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1472.798558] env[62507]: INFO nova.compute.manager [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Terminating instance
[ 1472.800235] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquiring lock "refresh_cache-7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1472.800402] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Acquired lock "refresh_cache-7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1472.800572] env[62507]: DEBUG nova.network.neutron [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1472.816030] env[62507]: DEBUG nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1472.827193] env[62507]: DEBUG nova.network.neutron [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
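Every Acquiring/Acquired/Releasing line in this log, and the timing pairs like "waited 432.290s ... held 0.432s", come from oslo.concurrency's lockutils. A minimal sketch of the two forms it logs (the lock names below are illustrative):

    from oslo_concurrency import lockutils

    # Context-manager form: produces the Acquiring/Acquired/Releasing
    # debug lines seen throughout this log.
    with lockutils.lock('refresh_cache-INSTANCE-UUID'):
        pass  # e.g. rebuild the instance's network info cache

    # Decorator form: produces the 'acquired by "..." :: waited/held Ns'
    # lines, timing how long the caller waited for and held the lock.
    @lockutils.synchronized('compute_resources')
    def claim():
        pass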
[ 1472.874303] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1472.874570] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1472.876112] env[62507]: INFO nova.compute.claims [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1473.013748] env[62507]: DEBUG nova.network.neutron [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1473.023313] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Releasing lock "refresh_cache-7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1473.023722] env[62507]: DEBUG nova.compute.manager [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1473.023917] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1473.024497] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bfa41234-4c3d-41d4-ab22-08d16938a202 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1473.035729] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21de80a-732b-43de-8dfd-24052ac28d2d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1473.068380] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef could not be found.
[ 1473.068592] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1473.068773] env[62507]: INFO nova.compute.manager [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1473.069021] env[62507]: DEBUG oslo.service.loopingcall [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1473.071256] env[62507]: DEBUG nova.compute.manager [-] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1473.071378] env[62507]: DEBUG nova.network.neutron [-] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1473.088558] env[62507]: DEBUG nova.network.neutron [-] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1473.096506] env[62507]: DEBUG nova.network.neutron [-] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1473.105959] env[62507]: INFO nova.compute.manager [-] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] Took 0.03 seconds to deallocate network for instance. [ 1473.126230] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d80d2c-c93c-4cf4-b071-6bf9629379df {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.134350] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a08ba7a-948e-4078-9447-de843d810203 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.168307] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df88135-972e-41ba-8788-a3e4a650f2b0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.170974] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.171425] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.176618] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42eac68a-f82c-46d0-b2c3-7b2d5cb0096f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.191720] env[62507]: DEBUG nova.compute.provider_tree [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1473.200560] env[62507]: DEBUG nova.scheduler.client.report [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1473.217223] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.343s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.217681] env[62507]: DEBUG nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1473.228072] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3009ddf5-d72b-4a63-86dd-b1d432ae5f49 tempest-ServersV294TestFqdnHostnames-245486048 tempest-ServersV294TestFqdnHostnames-245486048-project-member] Lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.432s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.228910] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 361.809s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.229163] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef] During sync_power_state the instance has a pending task (deleting). Skip. [ 1473.229351] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "7b52bea8-cc4c-44d6-9b2b-0bce0c5689ef" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.250562] env[62507]: DEBUG nova.compute.utils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1473.251752] env[62507]: DEBUG nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1473.251937] env[62507]: DEBUG nova.network.neutron [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1473.258954] env[62507]: DEBUG nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Start building block device mappings for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1473.315400] env[62507]: DEBUG nova.policy [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '751698c254a140919588ea005a5e586d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e8135bf41224c058bca7f453921f08c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1473.324469] env[62507]: DEBUG nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1473.350425] env[62507]: DEBUG nova.virt.hardware [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1473.350663] env[62507]: DEBUG nova.virt.hardware [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1473.350822] env[62507]: DEBUG nova.virt.hardware [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1473.351010] env[62507]: DEBUG nova.virt.hardware [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1473.351168] env[62507]: DEBUG nova.virt.hardware [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1473.351322] env[62507]: DEBUG 
nova.virt.hardware [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1473.351529] env[62507]: DEBUG nova.virt.hardware [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1473.351692] env[62507]: DEBUG nova.virt.hardware [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1473.351935] env[62507]: DEBUG nova.virt.hardware [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1473.352157] env[62507]: DEBUG nova.virt.hardware [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1473.352347] env[62507]: DEBUG nova.virt.hardware [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1473.353475] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9d9533-501f-4f64-9122-f9e17156ecfa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.361701] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226bfdc4-2d19-4675-93b0-f1857d704914 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.644315] env[62507]: DEBUG nova.network.neutron [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Successfully created port: 3a7e34fb-c588-413d-9242-2fc4cef00697 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1474.167370] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.167572] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1474.167741] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.178854] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.179103] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.179276] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.179435] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1474.180594] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae7f35b-c3e1-421e-b768-bec42014df2c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.189904] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2d9c3e-1e37-4c7a-934f-69f20af77424 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.210650] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57fad842-4f76-4481-ba3c-4e2a54c00a1f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.217654] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3701d8de-64cc-4edc-bc8a-98c4955f0420 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.248034] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181152MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1474.248199] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1474.248396] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.327279] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b53bed7e-5e76-4aa5-abe2-b05750497404 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1474.327441] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1fac8aa4-37a9-4f94-8050-b338cd2cd182 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1474.327571] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3627bbf7-507f-4345-b093-3b4f5bb45eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1474.327693] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d888bcb9-89ef-41aa-b637-e2a15efd0ce8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1474.327832] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1474.327961] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f4f51028-a313-4d17-bcf1-4decec2d3c3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1474.328091] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 65efc608-6573-4690-8d11-2f0459647d70 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1474.328211] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1474.328327] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01043570-d72d-4a97-8c51-cfe30b25b82b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1474.328443] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef5633ea-273d-429f-9a02-326711b73bab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1474.339613] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7ff089f8-f304-4c2e-bf3d-16997fe8968c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1474.349768] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 637de77e-d142-45ca-8a4e-3bf365e31502 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1474.359556] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e3b2a1ab-a686-4e28-85fd-9608f1cd6430 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1474.369032] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 630d18e9-4769-4141-b0a8-7dd32d853be1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1474.379262] env[62507]: DEBUG nova.network.neutron [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Successfully updated port: 3a7e34fb-c588-413d-9242-2fc4cef00697 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1474.381153] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1474.381247] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1474.381368] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1474.392441] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "refresh_cache-ef5633ea-273d-429f-9a02-326711b73bab" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.392582] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquired lock "refresh_cache-ef5633ea-273d-429f-9a02-326711b73bab" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.392729] env[62507]: DEBUG nova.network.neutron [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1474.434341] env[62507]: DEBUG nova.network.neutron [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1474.573557] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3238b533-1517-4402-9f89-532477b7270a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.581498] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9085be3b-de49-461b-a7d2-a0c591525f47 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.610520] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6b6b93-21aa-421f-a0ac-a81b25311cb0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.617360] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f90efd-00a1-4f69-b4bb-e0c1718bd767 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.630097] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1474.638214] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1474.652244] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1474.652428] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.404s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.791598] env[62507]: DEBUG nova.compute.manager [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Received event network-vif-plugged-3a7e34fb-c588-413d-9242-2fc4cef00697 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1474.791829] env[62507]: DEBUG oslo_concurrency.lockutils [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] Acquiring lock "ef5633ea-273d-429f-9a02-326711b73bab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1474.792051] env[62507]: DEBUG oslo_concurrency.lockutils [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] Lock "ef5633ea-273d-429f-9a02-326711b73bab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.792230] env[62507]: DEBUG oslo_concurrency.lockutils [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] Lock "ef5633ea-273d-429f-9a02-326711b73bab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.792403] env[62507]: DEBUG nova.compute.manager [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] [instance: ef5633ea-273d-429f-9a02-326711b73bab] No waiting events found dispatching network-vif-plugged-3a7e34fb-c588-413d-9242-2fc4cef00697 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1474.792571] env[62507]: WARNING nova.compute.manager [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Received unexpected event network-vif-plugged-3a7e34fb-c588-413d-9242-2fc4cef00697 for instance with vm_state building and task_state spawning. [ 1474.792732] env[62507]: DEBUG nova.compute.manager [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Received event network-changed-3a7e34fb-c588-413d-9242-2fc4cef00697 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1474.792894] env[62507]: DEBUG nova.compute.manager [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Refreshing instance network info cache due to event network-changed-3a7e34fb-c588-413d-9242-2fc4cef00697. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1474.793069] env[62507]: DEBUG oslo_concurrency.lockutils [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] Acquiring lock "refresh_cache-ef5633ea-273d-429f-9a02-326711b73bab" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.834293] env[62507]: DEBUG nova.network.neutron [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Updating instance_info_cache with network_info: [{"id": "3a7e34fb-c588-413d-9242-2fc4cef00697", "address": "fa:16:3e:4f:28:6e", "network": {"id": "b0f6036c-0ac1-413b-85a4-8e1a827463a0", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1523944788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e8135bf41224c058bca7f453921f08c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7e34fb-c5", "ovs_interfaceid": "3a7e34fb-c588-413d-9242-2fc4cef00697", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.847353] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Releasing lock "refresh_cache-ef5633ea-273d-429f-9a02-326711b73bab" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.848034] env[62507]: DEBUG nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Instance network_info: |[{"id": "3a7e34fb-c588-413d-9242-2fc4cef00697", "address": "fa:16:3e:4f:28:6e", "network": {"id": "b0f6036c-0ac1-413b-85a4-8e1a827463a0", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1523944788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e8135bf41224c058bca7f453921f08c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7e34fb-c5", "ovs_interfaceid": "3a7e34fb-c588-413d-9242-2fc4cef00697", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1474.848034] env[62507]: DEBUG oslo_concurrency.lockutils [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] Acquired lock "refresh_cache-ef5633ea-273d-429f-9a02-326711b73bab" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.848246] env[62507]: DEBUG nova.network.neutron [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Refreshing network info cache for port 3a7e34fb-c588-413d-9242-2fc4cef00697 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1474.849205] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:28:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a7e34fb-c588-413d-9242-2fc4cef00697', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1474.856605] env[62507]: DEBUG oslo.service.loopingcall [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1474.859749] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1474.860411] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a37699eb-f9bd-4211-9ddd-9bf051e85f70 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.880366] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1474.880366] env[62507]: value = "task-2460066" [ 1474.880366] env[62507]: _type = "Task" [ 1474.880366] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.890174] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460066, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.155232] env[62507]: DEBUG nova.network.neutron [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Updated VIF entry in instance network info cache for port 3a7e34fb-c588-413d-9242-2fc4cef00697. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1475.155589] env[62507]: DEBUG nova.network.neutron [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Updating instance_info_cache with network_info: [{"id": "3a7e34fb-c588-413d-9242-2fc4cef00697", "address": "fa:16:3e:4f:28:6e", "network": {"id": "b0f6036c-0ac1-413b-85a4-8e1a827463a0", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1523944788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e8135bf41224c058bca7f453921f08c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7e34fb-c5", "ovs_interfaceid": "3a7e34fb-c588-413d-9242-2fc4cef00697", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.167350] env[62507]: DEBUG oslo_concurrency.lockutils [req-f0d34dd7-9da9-45cd-b43d-6affb53deae7 req-8d8b2ecd-c66d-4ae3-8af8-56cd8a781315 service nova] Releasing lock "refresh_cache-ef5633ea-273d-429f-9a02-326711b73bab" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.390764] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460066, 'name': CreateVM_Task, 'duration_secs': 0.28912} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.391073] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1475.391639] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.391809] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.392123] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1475.392371] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aaa3c29d-47fe-44e4-99d8-ce621db3fc15 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.396607] env[62507]: DEBUG oslo_vmware.api [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for the task: (returnval){ [ 1475.396607] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]524cc12e-ce40-70fa-4633-e119dc531e8e" [ 1475.396607] env[62507]: _type = "Task" [ 1475.396607] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.403670] env[62507]: DEBUG oslo_vmware.api [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]524cc12e-ce40-70fa-4633-e119dc531e8e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.906675] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.906935] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1475.907166] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.820459] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8f7086b2-f9ff-4d3b-88ad-8c98be1e4d95 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "8e22d586-0ab8-4968-b0d1-2ef1cd8c0249" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.033037] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f9bea71b-d43d-406a-a2d4-e88cc9e1065c tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "ef5633ea-273d-429f-9a02-326711b73bab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.013457] env[62507]: WARNING oslo_vmware.rw_handles [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1522.013457] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1522.013457] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1522.013457] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1522.013457] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1522.013457] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1522.013457] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1522.013457] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1522.013457] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1522.013457] env[62507]: ERROR oslo_vmware.rw_handles raise 
RemoteDisconnected("Remote end closed connection without" [ 1522.013457] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1522.013457] env[62507]: ERROR oslo_vmware.rw_handles [ 1522.014206] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/190ebd21-da43-494e-82f2-bd5d59054526/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1522.016017] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1522.016282] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Copying Virtual Disk [datastore2] vmware_temp/190ebd21-da43-494e-82f2-bd5d59054526/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/190ebd21-da43-494e-82f2-bd5d59054526/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1522.016566] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b419b67-d4c6-4c76-be9a-63e61473a606 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.023745] env[62507]: DEBUG oslo_vmware.api [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for the task: (returnval){ [ 1522.023745] env[62507]: value = "task-2460067" [ 1522.023745] env[62507]: _type = "Task" [ 1522.023745] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.031175] env[62507]: DEBUG oslo_vmware.api [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': task-2460067, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.534969] env[62507]: DEBUG oslo_vmware.exceptions [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1522.535284] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.535726] env[62507]: ERROR nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1522.535726] env[62507]: Faults: ['InvalidArgument'] [ 1522.535726] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Traceback (most recent call last): [ 1522.535726] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1522.535726] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] yield resources [ 1522.535726] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1522.535726] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] self.driver.spawn(context, instance, image_meta, [ 1522.535726] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1522.535726] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1522.535726] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1522.535726] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] self._fetch_image_if_missing(context, vi) [ 1522.535726] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1522.536937] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] image_cache(vi, tmp_image_ds_loc) [ 1522.536937] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1522.536937] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] vm_util.copy_virtual_disk( [ 1522.536937] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1522.536937] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] session._wait_for_task(vmdk_copy_task) [ 1522.536937] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1522.536937] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] return self.wait_for_task(task_ref) [ 1522.536937] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1522.536937] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] return evt.wait() [ 1522.536937] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1522.536937] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] result = hub.switch() [ 1522.536937] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1522.536937] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] return self.greenlet.switch() [ 1522.537872] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1522.537872] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] self.f(*self.args, **self.kw) [ 1522.537872] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1522.537872] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] raise exceptions.translate_fault(task_info.error) [ 1522.537872] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1522.537872] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Faults: ['InvalidArgument'] [ 1522.537872] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] [ 1522.537872] env[62507]: INFO nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Terminating instance [ 1522.537872] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.538553] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1522.538553] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3f30c9a-75d3-4f50-bdf8-a1f1ca078cf6 {{(pid=62507) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.540180] env[62507]: DEBUG nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1522.540392] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1522.541111] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b37a6a-973c-4225-b099-22c8cfb96370 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.547640] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1522.547841] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-815b9638-d7ef-411a-aaf8-743b0b72cda8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.549855] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1522.550027] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1522.550958] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf9dde53-1e01-48a7-944a-f470801ea823 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.555921] env[62507]: DEBUG oslo_vmware.api [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Waiting for the task: (returnval){ [ 1522.555921] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5271e791-d7bd-5fea-1f59-9acbcb358c2e" [ 1522.555921] env[62507]: _type = "Task" [ 1522.555921] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.570136] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1522.570365] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Creating directory with path [datastore2] vmware_temp/1eb3d724-070a-47ba-a777-51704ff65445/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1522.570575] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-882a2a7f-441d-44de-a2f9-a646c215fc67 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.589684] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Created directory with path [datastore2] vmware_temp/1eb3d724-070a-47ba-a777-51704ff65445/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1522.589877] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Fetch image to [datastore2] vmware_temp/1eb3d724-070a-47ba-a777-51704ff65445/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1522.590065] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/1eb3d724-070a-47ba-a777-51704ff65445/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1522.590846] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83184380-4d75-4055-b5ae-1f27456e7570 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.597773] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8b1508-f220-41c6-950c-1acca7b87541 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.606919] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4dcca4-fc0b-45ad-b34c-7345660c4260 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.638162] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97bcf53-0216-40c9-bff3-9cdca1aec199 {{(pid=62507) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.640700] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1522.640892] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1522.641080] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Deleting the datastore file [datastore2] b53bed7e-5e76-4aa5-abe2-b05750497404 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1522.641301] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8bef2b45-0ff8-49f3-8aa6-03e0a2db60df {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.646181] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b3331c0b-8c2d-47c0-a7bf-83aaf51941e8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.649050] env[62507]: DEBUG oslo_vmware.api [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for the task: (returnval){ [ 1522.649050] env[62507]: value = "task-2460069" [ 1522.649050] env[62507]: _type = "Task" [ 1522.649050] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.655726] env[62507]: DEBUG oslo_vmware.api [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': task-2460069, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.676349] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1522.810382] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.811218] env[62507]: ERROR nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. [ 1522.811218] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Traceback (most recent call last): [ 1522.811218] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1522.811218] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1522.811218] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1522.811218] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] result = getattr(controller, method)(*args, **kwargs) [ 1522.811218] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1522.811218] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self._get(image_id) [ 1522.811218] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1522.811218] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1522.811218] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1522.811613] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] resp, body = self.http_client.get(url, headers=header) [ 1522.811613] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1522.811613] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self.request(url, 'GET', **kwargs) [ 1522.811613] 
env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1522.811613] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self._handle_response(resp) [ 1522.811613] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1522.811613] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise exc.from_response(resp, resp.content) [ 1522.811613] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1522.811613] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1522.811613] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] During handling of the above exception, another exception occurred: [ 1522.811613] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1522.811613] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Traceback (most recent call last): [ 1522.812097] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1522.812097] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] yield resources [ 1522.812097] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1522.812097] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self.driver.spawn(context, instance, image_meta, [ 1522.812097] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1522.812097] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1522.812097] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1522.812097] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._fetch_image_if_missing(context, vi) [ 1522.812097] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1522.812097] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] image_fetch(context, vi, tmp_image_ds_loc) [ 1522.812097] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1522.812097] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] 
images.fetch_image( [ 1522.812097] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1522.812489] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] metadata = IMAGE_API.get(context, image_ref) [ 1522.812489] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1522.812489] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return session.show(context, image_id, [ 1522.812489] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1522.812489] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] _reraise_translated_image_exception(image_id) [ 1522.812489] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1522.812489] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise new_exc.with_traceback(exc_trace) [ 1522.812489] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1522.812489] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1522.812489] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1522.812489] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] result = getattr(controller, method)(*args, **kwargs) [ 1522.812489] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1522.812489] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self._get(image_id) [ 1522.812874] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1522.812874] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1522.812874] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1522.812874] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] resp, body = self.http_client.get(url, headers=header) [ 1522.812874] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1522.812874] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self.request(url, 'GET', **kwargs) [ 1522.812874] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1522.812874] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self._handle_response(resp) [ 1522.812874] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1522.812874] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise exc.from_response(resp, resp.content) [ 1522.812874] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] nova.exception.ImageNotAuthorized: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. [ 1522.812874] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1522.813220] env[62507]: INFO nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Terminating instance [ 1522.813220] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.813287] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1522.813650] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a0ae3c3-89a2-4ca6-b944-3ae8b75e000b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.816232] env[62507]: DEBUG nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Start destroying the instance on the hypervisor. 
{{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1522.816427] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1522.817206] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cdc47dd-3a98-4710-8690-50690cd458f5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.824290] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1522.825313] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16810f32-50f3-4254-b2dc-a71971d7a8b5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.826657] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1522.826827] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1522.827489] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-962def5b-07e3-4f58-8347-9dbdbe5dc31f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.833780] env[62507]: DEBUG oslo_vmware.api [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for the task: (returnval){ [ 1522.833780] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52da4bad-e104-1ce8-fc0b-f248dfbcf480" [ 1522.833780] env[62507]: _type = "Task" [ 1522.833780] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.843855] env[62507]: DEBUG oslo_vmware.api [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52da4bad-e104-1ce8-fc0b-f248dfbcf480, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.901248] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1522.901469] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1522.901650] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Deleting the datastore file [datastore2] 1fac8aa4-37a9-4f94-8050-b338cd2cd182 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1522.901908] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07add53b-309f-4446-8f13-0dabd92a5f50 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.907638] env[62507]: DEBUG oslo_vmware.api [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Waiting for the task: (returnval){ [ 1522.907638] env[62507]: value = "task-2460071" [ 1522.907638] env[62507]: _type = "Task" [ 1522.907638] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.914664] env[62507]: DEBUG oslo_vmware.api [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Task: {'id': task-2460071, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.158736] env[62507]: DEBUG oslo_vmware.api [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': task-2460069, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07284} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.159012] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1523.159199] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1523.159388] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1523.159584] env[62507]: INFO nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1523.161713] env[62507]: DEBUG nova.compute.claims [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1523.161905] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.162139] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.346456] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1523.346721] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Creating directory with path [datastore2] vmware_temp/f9455e37-acb1-4736-af23-348573406aec/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1523.347193] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-f5e9db7a-3aec-45d2-b9b9-f28464e2daf5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.361044] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Created directory with path [datastore2] vmware_temp/f9455e37-acb1-4736-af23-348573406aec/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1523.361044] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Fetch image to [datastore2] vmware_temp/f9455e37-acb1-4736-af23-348573406aec/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1523.361044] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/f9455e37-acb1-4736-af23-348573406aec/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1523.361800] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f13d52-2f2e-4acf-adf6-09d6725355fd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.368989] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dbbd5cd-3d24-4ba7-b71d-c445425dca93 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.380649] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127fd0bf-4f41-4525-afd8-7f6cf2a30e57 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.418950] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea18db2f-2e6c-4452-b2ce-6b9e6acdb1c4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.426475] env[62507]: DEBUG oslo_vmware.api [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Task: {'id': task-2460071, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073178} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.427957] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1523.428177] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1523.428359] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1523.428536] env[62507]: INFO nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1523.430320] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-57554742-3945-4bfc-bc79-539f612fca53 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.432924] env[62507]: DEBUG nova.compute.claims [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1523.433108] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.433800] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e36e364-e99d-46f8-8675-56434c75657a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.440136] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3521d9-cf0b-433c-a551-6ec54d78fe0a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.472109] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9815d4-ccb0-44fc-b941-2e9f7bff57f5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.474880] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Downloading image file data 
601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1523.481573] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0322f06-1932-439c-b47d-644b85c88b18 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.494861] env[62507]: DEBUG nova.compute.provider_tree [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1523.506602] env[62507]: DEBUG nova.scheduler.client.report [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1523.523498] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.361s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.523850] env[62507]: ERROR nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1523.523850] env[62507]: Faults: ['InvalidArgument'] [ 1523.523850] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Traceback (most recent call last): [ 1523.523850] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1523.523850] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] self.driver.spawn(context, instance, image_meta, [ 1523.523850] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1523.523850] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1523.523850] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1523.523850] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] 
self._fetch_image_if_missing(context, vi) [ 1523.523850] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1523.523850] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] image_cache(vi, tmp_image_ds_loc) [ 1523.523850] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1523.524287] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] vm_util.copy_virtual_disk( [ 1523.524287] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1523.524287] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] session._wait_for_task(vmdk_copy_task) [ 1523.524287] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1523.524287] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] return self.wait_for_task(task_ref) [ 1523.524287] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1523.524287] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] return evt.wait() [ 1523.524287] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1523.524287] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] result = hub.switch() [ 1523.524287] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1523.524287] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] return self.greenlet.switch() [ 1523.524287] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1523.524287] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] self.f(*self.args, **self.kw) [ 1523.524657] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1523.524657] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] raise exceptions.translate_fault(task_info.error) [ 1523.524657] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1523.524657] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Faults: ['InvalidArgument'] [ 1523.524657] env[62507]: ERROR nova.compute.manager [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] [ 1523.524657] env[62507]: DEBUG nova.compute.utils [None 
req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1523.525626] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.092s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.528932] env[62507]: DEBUG nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Build of instance b53bed7e-5e76-4aa5-abe2-b05750497404 was re-scheduled: A specified parameter was not correct: fileType [ 1523.528932] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1523.529275] env[62507]: DEBUG nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1523.529468] env[62507]: DEBUG nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1523.529665] env[62507]: DEBUG nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1523.529826] env[62507]: DEBUG nova.network.neutron [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1523.532498] env[62507]: DEBUG oslo_vmware.rw_handles [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f9455e37-acb1-4736-af23-348573406aec/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1523.595031] env[62507]: DEBUG oslo_vmware.rw_handles [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1523.595241] env[62507]: DEBUG oslo_vmware.rw_handles [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f9455e37-acb1-4736-af23-348573406aec/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1523.764438] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1032ef-cb29-4ffd-9785-35a74d8ee388 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.772096] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a30e196-c232-4627-91cd-ab228d6f0c85 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.804047] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65ccb0f-4fa1-4ed6-a4a6-3eb28ccd4da5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.811110] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1147ca3-edb3-45d1-92ff-995a54bd9d03 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.823877] env[62507]: DEBUG nova.compute.provider_tree [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1523.832325] env[62507]: DEBUG nova.scheduler.client.report [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1523.846068] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.320s {{(pid=62507) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.846778] env[62507]: ERROR nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. [ 1523.846778] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Traceback (most recent call last): [ 1523.846778] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1523.846778] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1523.846778] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1523.846778] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] result = getattr(controller, method)(*args, **kwargs) [ 1523.846778] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1523.846778] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self._get(image_id) [ 1523.846778] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1523.846778] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1523.846778] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1523.847074] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] resp, body = self.http_client.get(url, headers=header) [ 1523.847074] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1523.847074] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self.request(url, 'GET', **kwargs) [ 1523.847074] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1523.847074] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self._handle_response(resp) [ 1523.847074] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1523.847074] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise exc.from_response(resp, resp.content) [ 1523.847074] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could 
not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1523.847074] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1523.847074] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] During handling of the above exception, another exception occurred: [ 1523.847074] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1523.847074] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Traceback (most recent call last): [ 1523.847360] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1523.847360] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self.driver.spawn(context, instance, image_meta, [ 1523.847360] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1523.847360] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1523.847360] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1523.847360] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._fetch_image_if_missing(context, vi) [ 1523.847360] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1523.847360] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] image_fetch(context, vi, tmp_image_ds_loc) [ 1523.847360] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1523.847360] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] images.fetch_image( [ 1523.847360] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1523.847360] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] metadata = IMAGE_API.get(context, image_ref) [ 1523.847360] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1523.847660] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return session.show(context, image_id, [ 1523.847660] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1523.847660] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] _reraise_translated_image_exception(image_id) [ 1523.847660] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1523.847660] 
env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise new_exc.with_traceback(exc_trace) [ 1523.847660] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1523.847660] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1523.847660] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1523.847660] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] result = getattr(controller, method)(*args, **kwargs) [ 1523.847660] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1523.847660] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self._get(image_id) [ 1523.847660] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1523.847660] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1523.847977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1523.847977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] resp, body = self.http_client.get(url, headers=header) [ 1523.847977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1523.847977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self.request(url, 'GET', **kwargs) [ 1523.847977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1523.847977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self._handle_response(resp) [ 1523.847977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1523.847977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise exc.from_response(resp, resp.content) [ 1523.847977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] nova.exception.ImageNotAuthorized: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. [ 1523.847977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1523.847977] env[62507]: DEBUG nova.compute.utils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. 
{{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1523.848925] env[62507]: DEBUG nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Build of instance 1fac8aa4-37a9-4f94-8050-b338cd2cd182 was re-scheduled: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1523.849448] env[62507]: DEBUG nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1523.849643] env[62507]: DEBUG nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1523.849799] env[62507]: DEBUG nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1523.849965] env[62507]: DEBUG nova.network.neutron [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1523.873061] env[62507]: DEBUG nova.network.neutron [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.884598] env[62507]: INFO nova.compute.manager [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Took 0.35 seconds to deallocate network for instance. [ 1523.967792] env[62507]: DEBUG neutronclient.v2_0.client [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62507) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1523.969089] env[62507]: ERROR nova.compute.manager [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
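The traceback above shows the two-layer failure typical of an expired Keystone token during a long build: glanceclient raises HTTPUnauthorized for the image GET, and nova/image/glance.py re-raises it as ImageNotAuthorized while preserving the original traceback (the `_reraise_translated_image_exception` frame). Below is a minimal, self-contained sketch of that translate-and-reraise pattern; the class and helper names are illustrative stand-ins, not Nova's real code.

```python
import sys


class HTTPUnauthorized(Exception):
    """Stand-in for glanceclient.exc.HTTPUnauthorized (the inner 401)."""


class ImageNotAuthorized(Exception):
    """Stand-in for nova.exception.ImageNotAuthorized (the outer error)."""


def _translate_image_exception(image_id, exc_value):
    # Map the client-level 401 onto a service-level exception.
    if isinstance(exc_value, HTTPUnauthorized):
        return ImageNotAuthorized(f"Not authorized for image {image_id}.")
    return exc_value


def show(image_id):
    try:
        # Simulate the glanceclient GET failing because the token expired.
        raise HTTPUnauthorized("HTTP 401 Unauthorized")
    except HTTPUnauthorized:
        exc_type, exc_value, exc_trace = sys.exc_info()
        # Re-raise the translated exception with the original traceback,
        # which is why the log shows both exception types for one failure.
        raise _translate_image_exception(image_id, exc_value).with_traceback(
            exc_trace)


show("601dc712-1d53-404c-b128-df5971f300a1")
```

Running the sketch prints a traceback ending in ImageNotAuthorized but still containing the HTTPUnauthorized frames, matching the shape of the log records above.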
[ 1523.969089] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Traceback (most recent call last): [ 1523.969089] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1523.969089] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1523.969089] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1523.969089] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] result = getattr(controller, method)(*args, **kwargs) [ 1523.969089] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1523.969089] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self._get(image_id) [ 1523.969089] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1523.969089] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1523.969089] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1523.969481] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] resp, body = self.http_client.get(url, headers=header) [ 1523.969481] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1523.969481] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self.request(url, 'GET', **kwargs) [ 1523.969481] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1523.969481] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self._handle_response(resp) [ 1523.969481] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1523.969481] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise exc.from_response(resp, resp.content) [ 1523.969481] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1523.969481] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1523.969481] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] During handling of the above exception, another exception occurred: [ 1523.969481] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1523.969481] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Traceback (most recent call last): [ 1523.969818] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1523.969818] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self.driver.spawn(context, instance, image_meta, [ 1523.969818] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1523.969818] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1523.969818] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1523.969818] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._fetch_image_if_missing(context, vi) [ 1523.969818] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1523.969818] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] image_fetch(context, vi, tmp_image_ds_loc) [ 1523.969818] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1523.969818] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] images.fetch_image( [ 1523.969818] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1523.969818] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] metadata = IMAGE_API.get(context, image_ref) [ 1523.969818] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1523.970178] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return session.show(context, image_id, [ 1523.970178] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1523.970178] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] _reraise_translated_image_exception(image_id) [ 1523.970178] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1523.970178] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise new_exc.with_traceback(exc_trace) [ 1523.970178] env[62507]: ERROR nova.compute.manager [instance: 
1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1523.970178] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1523.970178] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1523.970178] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] result = getattr(controller, method)(*args, **kwargs) [ 1523.970178] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1523.970178] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self._get(image_id) [ 1523.970178] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1523.970178] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1523.970559] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1523.970559] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] resp, body = self.http_client.get(url, headers=header) [ 1523.970559] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1523.970559] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self.request(url, 'GET', **kwargs) [ 1523.970559] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1523.970559] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self._handle_response(resp) [ 1523.970559] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1523.970559] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise exc.from_response(resp, resp.content) [ 1523.970559] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] nova.exception.ImageNotAuthorized: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. 
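The repeated "During handling of the above exception, another exception occurred" banners in these records are ordinary Python implicit exception chaining: every exception raised inside an `except` block keeps the prior exception as `__context__`, and the logger prints the whole chain. A tiny demonstration (names are illustrative):

```python
def fetch_image():
    # Stands in for the glance call that fails with HTTP 401.
    raise PermissionError("HTTP 401 Unauthorized")


def build_instance():
    try:
        fetch_image()
    except PermissionError:
        # Raising a new exception inside an except block (without
        # "from None") keeps the 401 as __context__, so the printed
        # traceback shows both, separated by the "During handling" banner.
        raise RuntimeError(
            "Build of instance was re-scheduled: Not authorized for image")


build_instance()
```

That is why one underlying 401 produces several stacked tracebacks here: the build failure, the reschedule, and the network cleanup each re-raise inside a handler.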
[ 1523.970559] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1523.970559] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] During handling of the above exception, another exception occurred: [ 1523.970559] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1523.970559] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Traceback (most recent call last): [ 1523.970914] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1523.970914] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._build_and_run_instance(context, instance, image, [ 1523.970914] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1523.970914] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise exception.RescheduledException( [ 1523.970914] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] nova.exception.RescheduledException: Build of instance 1fac8aa4-37a9-4f94-8050-b338cd2cd182 was re-scheduled: Not authorized for image 601dc712-1d53-404c-b128-df5971f300a1. [ 1523.970914] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1523.970914] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] During handling of the above exception, another exception occurred: [ 1523.970914] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1523.970914] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Traceback (most recent call last): [ 1523.970914] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1523.970914] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] ret = obj(*args, **kwargs) [ 1523.970914] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1523.970914] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] exception_handler_v20(status_code, error_body) [ 1523.971296] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1523.971296] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise client_exc(message=error_message, [ 1523.971296] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1523.971296] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Neutron server returns request_ids: ['req-5189fd57-3fe6-4934-a047-28309c2f0998'] [ 1523.971296] env[62507]: ERROR nova.compute.manager [instance: 
1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1523.971296] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] During handling of the above exception, another exception occurred: [ 1523.971296] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1523.971296] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Traceback (most recent call last): [ 1523.971296] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1523.971296] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._deallocate_network(context, instance, requested_networks) [ 1523.971296] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1523.971296] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self.network_api.deallocate_for_instance( [ 1523.971296] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1523.971614] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] data = neutron.list_ports(**search_opts) [ 1523.971614] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1523.971614] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] ret = obj(*args, **kwargs) [ 1523.971614] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1523.971614] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self.list('ports', self.ports_path, retrieve_all, [ 1523.971614] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1523.971614] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] ret = obj(*args, **kwargs) [ 1523.971614] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1523.971614] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] for r in self._pagination(collection, path, **params): [ 1523.971614] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1523.971614] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] res = self.get(path, params=params) [ 1523.971614] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1523.971614] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] ret = obj(*args, **kwargs) [ 1523.971977] env[62507]: ERROR nova.compute.manager [instance: 
1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1523.971977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self.retry_request("GET", action, body=body, [ 1523.971977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1523.971977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] ret = obj(*args, **kwargs) [ 1523.971977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1523.971977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self.do_request(method, action, body=body, [ 1523.971977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1523.971977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] ret = obj(*args, **kwargs) [ 1523.971977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1523.971977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._handle_fault_response(status_code, replybody, resp) [ 1523.971977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1523.971977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise exception.Unauthorized() [ 1523.971977] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] nova.exception.Unauthorized: Not authorized. 
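The `wrapper` frames at nova/network/neutron.py:196 in the traceback above show that every neutronclient call is routed through a decorator that converts client-level auth failures (neutronclient's 401) into Nova-level exceptions (the `raise exception.Unauthorized()` at line 204). A hedged sketch of that decorator pattern, with stand-in exception classes rather than Nova's real ones:

```python
import functools


class NeutronUnauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized."""


class NovaUnauthorized(Exception):
    """Stand-in for nova.exception.Unauthorized."""


def translate_neutron_auth(func):
    """Convert client-level 401s into the service-level exception."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except NeutronUnauthorized:
            raise NovaUnauthorized("Not authorized.")
    return wrapper


@translate_neutron_auth
def list_ports(**search_opts):
    # Simulate Neutron rejecting the expired token for the port listing
    # done during deallocate_for_instance().
    raise NeutronUnauthorized(
        "401: The request you have made requires authentication.")


list_ports(device_id="1fac8aa4-37a9-4f94-8050-b338cd2cd182")
```

The design point is that callers deal only in service exceptions; the client library's exception types never leak past the wrapper.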
[ 1523.972379] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1523.988467] env[62507]: INFO nova.scheduler.client.report [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Deleted allocations for instance b53bed7e-5e76-4aa5-abe2-b05750497404 [ 1524.019561] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dac2df3-3a42-4bae-828f-9ba79a7cec21 tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "b53bed7e-5e76-4aa5-abe2-b05750497404" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 630.788s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.021646] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0d3c7653-16ec-4117-8ad0-8601f1c8e99d tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "b53bed7e-5e76-4aa5-abe2-b05750497404" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 434.471s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.021646] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0d3c7653-16ec-4117-8ad0-8601f1c8e99d tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "b53bed7e-5e76-4aa5-abe2-b05750497404-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.021816] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0d3c7653-16ec-4117-8ad0-8601f1c8e99d tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "b53bed7e-5e76-4aa5-abe2-b05750497404-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.022065] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0d3c7653-16ec-4117-8ad0-8601f1c8e99d tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "b53bed7e-5e76-4aa5-abe2-b05750497404-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.024205] env[62507]: INFO nova.compute.manager [None req-0d3c7653-16ec-4117-8ad0-8601f1c8e99d tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Terminating instance [ 1524.025990] env[62507]: DEBUG nova.compute.manager [None req-0d3c7653-16ec-4117-8ad0-8601f1c8e99d tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Start destroying the instance on the hypervisor. 
{{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1524.026275] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0d3c7653-16ec-4117-8ad0-8601f1c8e99d tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1524.026794] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eaddb99b-06c8-4778-8c7c-f038f8221514 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.031675] env[62507]: INFO nova.scheduler.client.report [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Deleted allocations for instance 1fac8aa4-37a9-4f94-8050-b338cd2cd182 [ 1524.039780] env[62507]: DEBUG nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1524.045268] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f414b4ad-805f-4d55-85e5-6d69650d217e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.056613] env[62507]: DEBUG oslo_concurrency.lockutils [None req-15f4285e-0ab6-4baf-a0c2-e1041836c581 tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "1fac8aa4-37a9-4f94-8050-b338cd2cd182" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 587.540s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.057553] env[62507]: DEBUG oslo_concurrency.lockutils [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "1fac8aa4-37a9-4f94-8050-b338cd2cd182" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 391.213s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.057762] env[62507]: DEBUG oslo_concurrency.lockutils [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "1fac8aa4-37a9-4f94-8050-b338cd2cd182-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.057979] env[62507]: DEBUG oslo_concurrency.lockutils [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "1fac8aa4-37a9-4f94-8050-b338cd2cd182-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.058137] env[62507]: DEBUG oslo_concurrency.lockutils [None 
req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "1fac8aa4-37a9-4f94-8050-b338cd2cd182-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.059968] env[62507]: INFO nova.compute.manager [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Terminating instance [ 1524.061483] env[62507]: DEBUG oslo_concurrency.lockutils [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquiring lock "refresh_cache-1fac8aa4-37a9-4f94-8050-b338cd2cd182" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1524.061651] env[62507]: DEBUG oslo_concurrency.lockutils [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Acquired lock "refresh_cache-1fac8aa4-37a9-4f94-8050-b338cd2cd182" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.061796] env[62507]: DEBUG nova.network.neutron [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1524.074495] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-0d3c7653-16ec-4117-8ad0-8601f1c8e99d tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b53bed7e-5e76-4aa5-abe2-b05750497404 could not be found. [ 1524.074915] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0d3c7653-16ec-4117-8ad0-8601f1c8e99d tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1524.074915] env[62507]: INFO nova.compute.manager [None req-0d3c7653-16ec-4117-8ad0-8601f1c8e99d tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1524.075117] env[62507]: DEBUG oslo.service.loopingcall [None req-0d3c7653-16ec-4117-8ad0-8601f1c8e99d tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1524.075340] env[62507]: DEBUG nova.compute.manager [-] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1524.075434] env[62507]: DEBUG nova.network.neutron [-] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1524.097394] env[62507]: DEBUG nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1524.105919] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.105919] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.107297] env[62507]: INFO nova.compute.claims [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1524.110393] env[62507]: DEBUG nova.network.neutron [-] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.122159] env[62507]: INFO nova.compute.manager [-] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] Took 0.05 seconds to deallocate network for instance. 
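The lockutils records around these entries report both a wait time (630.788s, 434.471s) and a held time for each named lock. A simplified, process-local sketch of that bookkeeping, assuming plain threading rather than oslo.concurrency's actual fair-lock machinery:

```python
import threading
import time
from contextlib import contextmanager

_locks = {}
_locks_guard = threading.Lock()


@contextmanager
def named_lock(name):
    # One process-local lock object per name, created on first use.
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" :: '
              f'held {time.monotonic() - held_start:.3f}s')


with named_lock("b53bed7e-5e76-4aa5-abe2-b05750497404-events"):
    time.sleep(0.01)
```

The long wait times in the log simply mean the terminate request queued behind the still-running build for the same instance UUID.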
[ 1524.185394] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.234333] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0d3c7653-16ec-4117-8ad0-8601f1c8e99d tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "b53bed7e-5e76-4aa5-abe2-b05750497404" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.213s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.235205] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "b53bed7e-5e76-4aa5-abe2-b05750497404" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 412.815s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.235401] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b53bed7e-5e76-4aa5-abe2-b05750497404] During sync_power_state the instance has a pending task (deleting). Skip. [ 1524.235726] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "b53bed7e-5e76-4aa5-abe2-b05750497404" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.356075] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1cf57d7-ebd1-478d-adbc-564c32aa7fc5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.365217] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d93cb76-bee7-4130-9219-a78bf0896429 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.396524] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ef9d38-64fb-472e-8af6-c5decadb2055 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.404227] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9e396d-8987-4d9b-928e-ef89f0b33667 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.417545] env[62507]: DEBUG nova.compute.provider_tree [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1524.427682] env[62507]: DEBUG nova.scheduler.client.report [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 
tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1524.442101] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.336s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.442590] env[62507]: DEBUG nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1524.445054] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.260s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.446407] env[62507]: INFO nova.compute.claims [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1524.452024] env[62507]: DEBUG nova.network.neutron [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Updating instance_info_cache with network_info: [{"id": "f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a", "address": "fa:16:3e:53:da:67", "network": {"id": "1d803a42-4a7c-4259-a6ca-4be0f7717b27", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f6379394ace14a7e97a0396f7e1277db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf733ac33-ae", "ovs_interfaceid": "f733ac33-aeb9-4dba-bbf5-fa9ff6fec74a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.467540] env[62507]: DEBUG oslo_concurrency.lockutils [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Releasing lock "refresh_cache-1fac8aa4-37a9-4f94-8050-b338cd2cd182" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.467937] env[62507]: DEBUG nova.compute.manager [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1524.468136] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1524.468633] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc7ead49-39ee-453f-87fe-943ca818c7ea {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.477321] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b0bd56-bd76-4d30-86d3-0de8ab8fe8e3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.489492] env[62507]: DEBUG nova.compute.utils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1524.492867] env[62507]: DEBUG nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1524.496017] env[62507]: DEBUG nova.network.neutron [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1524.510538] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1fac8aa4-37a9-4f94-8050-b338cd2cd182 could not be found. 
[ 1524.510751] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1524.510932] env[62507]: INFO nova.compute.manager [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1524.511236] env[62507]: DEBUG oslo.service.loopingcall [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1524.514131] env[62507]: DEBUG nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1524.516850] env[62507]: DEBUG nova.compute.manager [-] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1524.516927] env[62507]: DEBUG nova.network.neutron [-] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1524.581491] env[62507]: DEBUG nova.policy [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df0b12531a3e46e4a97a8d4082d6868e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13cb14d09e6f4d84996e4470f4e24eeb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1524.604732] env[62507]: DEBUG nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1524.646842] env[62507]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62507) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1524.646842] env[62507]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-5454d88c-d453-4411-a727-14b615c70119'] [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1524.647233] env[62507]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall 
self.network_api.deallocate_for_instance( [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1524.647696] env[62507]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1524.648585] env[62507]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
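The failing "Dynamic interval looping call" above is the retry wrapper the compute manager places around network deallocation: transient faults are retried, but a credential/configuration error such as this 401 is raised straight through, since retrying with the same bad token cannot succeed. A minimal sketch of that fail-fast-on-non-retryable pattern, with illustrative names rather than the actual oslo_service RetryDecorator API:

```python
import time


class AdminCredentialInvalid(Exception):
    """Non-retryable: retrying with the same bad credentials cannot help."""


def deallocate_network_with_retries(deallocate, attempts=3, delay=1.0):
    for attempt in range(1, attempts + 1):
        try:
            return deallocate()
        except AdminCredentialInvalid:
            # A configuration problem, not a transient fault: fail fast,
            # which is the behaviour the looping-call error above reflects.
            raise
        except Exception:
            if attempt == attempts:
                raise
            time.sleep(delay)  # transient error: back off and try again


def bad_deallocate():
    raise AdminCredentialInvalid(
        "Networking client is experiencing an unauthorized exception.")


deallocate_network_with_retries(bad_deallocate)
```

Distinguishing retryable from non-retryable errors is the key design choice here; without it, the loop would burn its retries against a dead token.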
[ 1524.648585] env[62507]: ERROR oslo.service.loopingcall [ 1524.649077] env[62507]: ERROR nova.compute.manager [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1524.660331] env[62507]: DEBUG nova.virt.hardware [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1524.660622] env[62507]: DEBUG nova.virt.hardware [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1524.660820] env[62507]: DEBUG nova.virt.hardware [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1524.661048] env[62507]: DEBUG nova.virt.hardware [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1524.661238] env[62507]: DEBUG nova.virt.hardware [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1524.661394] env[62507]: DEBUG nova.virt.hardware [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1524.661657] env[62507]: DEBUG nova.virt.hardware [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1524.661917] env[62507]: DEBUG nova.virt.hardware [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1524.662184] env[62507]: DEBUG nova.virt.hardware [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1524.662402] env[62507]: DEBUG nova.virt.hardware [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1524.662634] env[62507]: DEBUG nova.virt.hardware [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1524.663595] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b3d926-56ad-4b8e-932c-87ebf55e9e43 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.676144] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816cb48e-498b-4c0c-93bf-02b20961ff6f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.685858] env[62507]: ERROR nova.compute.manager [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1524.685858] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Traceback (most recent call last): [ 1524.685858] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.685858] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] ret = obj(*args, **kwargs) [ 1524.685858] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1524.685858] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] exception_handler_v20(status_code, error_body) [ 1524.685858] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1524.685858] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise client_exc(message=error_message, [ 1524.685858] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1524.685858] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Neutron server returns request_ids: ['req-5454d88c-d453-4411-a727-14b615c70119'] [ 1524.685858] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1524.686171] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] During handling of the above exception, another exception occurred: [ 1524.686171] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1524.686171] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Traceback (most recent call last): [ 1524.686171] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1524.686171] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._delete_instance(context, instance, bdms) [ 1524.686171] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1524.686171] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._shutdown_instance(context, instance, bdms) [ 1524.686171] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1524.686171] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._try_deallocate_network(context, instance, requested_networks) [ 1524.686171] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1524.686171] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] with excutils.save_and_reraise_exception(): [ 1524.686171] env[62507]: ERROR 
nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1524.686171] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self.force_reraise() [ 1524.686510] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1524.686510] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise self.value [ 1524.686510] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1524.686510] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] _deallocate_network_with_retries() [ 1524.686510] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1524.686510] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return evt.wait() [ 1524.686510] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1524.686510] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] result = hub.switch() [ 1524.686510] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1524.686510] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self.greenlet.switch() [ 1524.686510] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1524.686510] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] result = func(*self.args, **self.kw) [ 1524.686814] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1524.686814] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] result = f(*args, **kwargs) [ 1524.686814] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1524.686814] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._deallocate_network( [ 1524.686814] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1524.686814] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self.network_api.deallocate_for_instance( [ 1524.686814] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1524.686814] env[62507]: ERROR nova.compute.manager [instance: 
1fac8aa4-37a9-4f94-8050-b338cd2cd182] data = neutron.list_ports(**search_opts) [ 1524.686814] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.686814] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] ret = obj(*args, **kwargs) [ 1524.686814] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1524.686814] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self.list('ports', self.ports_path, retrieve_all, [ 1524.686814] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.687152] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] ret = obj(*args, **kwargs) [ 1524.687152] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1524.687152] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] for r in self._pagination(collection, path, **params): [ 1524.687152] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1524.687152] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] res = self.get(path, params=params) [ 1524.687152] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.687152] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] ret = obj(*args, **kwargs) [ 1524.687152] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1524.687152] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self.retry_request("GET", action, body=body, [ 1524.687152] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.687152] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] ret = obj(*args, **kwargs) [ 1524.687152] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1524.687152] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] return self.do_request(method, action, body=body, [ 1524.687467] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.687467] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] ret = obj(*args, **kwargs) [ 1524.687467] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1524.687467] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] self._handle_fault_response(status_code, replybody, resp) [ 1524.687467] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1524.687467] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1524.687467] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1524.687467] env[62507]: ERROR nova.compute.manager [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] [ 1524.718854] env[62507]: DEBUG oslo_concurrency.lockutils [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Lock "1fac8aa4-37a9-4f94-8050-b338cd2cd182" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.661s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.726414] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf26601-ea0a-42ca-a81c-6ddcf2aea9a0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.738198] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07cd580-cb2d-4b89-bd37-49ce4090a0d8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.772404] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ccc529-da48-45b2-8b8e-74fe7fafeb63 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.780403] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0706952a-4190-44dc-b93c-70f9dfae1f37 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.796309] env[62507]: DEBUG nova.compute.provider_tree [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1524.801194] env[62507]: INFO nova.compute.manager [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] [instance: 1fac8aa4-37a9-4f94-8050-b338cd2cd182] Successfully reverted task state from None on failure for instance. 
[ 1524.804377] env[62507]: DEBUG nova.scheduler.client.report [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server [None req-738dcf24-ae8c-439b-8de6-f2c0f3b3cc6c tempest-MigrationsAdminTest-1356025599 tempest-MigrationsAdminTest-1356025599-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-5454d88c-d453-4411-a727-14b615c70119'] [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1524.807979] env[62507]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1524.808452] env[62507]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server raise self.value [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1524.808452] env[62507]: ERROR oslo_messaging.rpc.server raise self.value [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server raise self.value [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1524.809069] env[62507]: ERROR 
oslo_messaging.rpc.server return f(*args, **kwargs) [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1524.809069] env[62507]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server raise self.value [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server raise self.value [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1524.809531] env[62507]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1524.810311] env[62507]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.810311] env[62507]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.811157] env[62507]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1524.811807] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
297, in do_request [ 1524.811807] env[62507]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1524.811807] env[62507]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1524.811807] env[62507]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1524.811807] env[62507]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1524.811807] env[62507]: ERROR oslo_messaging.rpc.server [ 1524.816998] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.371s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.816998] env[62507]: DEBUG nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1524.855755] env[62507]: DEBUG nova.compute.utils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1524.857010] env[62507]: DEBUG nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1524.857192] env[62507]: DEBUG nova.network.neutron [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1524.878331] env[62507]: DEBUG nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Start building block device mappings for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1524.946942] env[62507]: DEBUG nova.policy [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '156af0071802455d9cf233b60f1761f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '884c06ca36464159847e4a452154a873', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1524.955550] env[62507]: DEBUG nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1524.967269] env[62507]: DEBUG nova.network.neutron [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Successfully created port: 4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1524.980467] env[62507]: DEBUG nova.virt.hardware [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1524.980692] env[62507]: DEBUG nova.virt.hardware [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1524.980850] env[62507]: DEBUG nova.virt.hardware [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1524.981046] env[62507]: DEBUG nova.virt.hardware [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1524.981199] env[62507]: DEBUG nova.virt.hardware [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1524.981347] env[62507]: DEBUG nova.virt.hardware [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1524.981560] env[62507]: DEBUG nova.virt.hardware [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1524.981731] env[62507]: DEBUG nova.virt.hardware [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1524.981906] env[62507]: DEBUG nova.virt.hardware [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1524.982086] env[62507]: DEBUG nova.virt.hardware [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1524.982269] env[62507]: DEBUG nova.virt.hardware [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1524.983770] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f078554a-f89c-41d0-91da-a27889aa70a8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.996027] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac5a7d7-be69-4396-9b76-295987a51582 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.284204] env[62507]: DEBUG nova.network.neutron [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Successfully created port: 430fe608-6304-405b-aa59-86cefe2399c2 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1525.998299] env[62507]: DEBUG nova.compute.manager [req-bbfee5ad-f6c7-44c7-b696-e9d8b631c5ae 
req-053bdfb9-a60e-4296-874e-a3ab7ba6baf6 service nova] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Received event network-vif-plugged-4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1525.998529] env[62507]: DEBUG oslo_concurrency.lockutils [req-bbfee5ad-f6c7-44c7-b696-e9d8b631c5ae req-053bdfb9-a60e-4296-874e-a3ab7ba6baf6 service nova] Acquiring lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.998739] env[62507]: DEBUG oslo_concurrency.lockutils [req-bbfee5ad-f6c7-44c7-b696-e9d8b631c5ae req-053bdfb9-a60e-4296-874e-a3ab7ba6baf6 service nova] Lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.998910] env[62507]: DEBUG oslo_concurrency.lockutils [req-bbfee5ad-f6c7-44c7-b696-e9d8b631c5ae req-053bdfb9-a60e-4296-874e-a3ab7ba6baf6 service nova] Lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.999094] env[62507]: DEBUG nova.compute.manager [req-bbfee5ad-f6c7-44c7-b696-e9d8b631c5ae req-053bdfb9-a60e-4296-874e-a3ab7ba6baf6 service nova] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] No waiting events found dispatching network-vif-plugged-4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1525.999265] env[62507]: WARNING nova.compute.manager [req-bbfee5ad-f6c7-44c7-b696-e9d8b631c5ae req-053bdfb9-a60e-4296-874e-a3ab7ba6baf6 service nova] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Received unexpected event network-vif-plugged-4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54 for instance with vm_state building and task_state spawning.
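The "No waiting events found dispatching" / "Received unexpected event" pair above is the receiving side of Neutron's external-event notification: neutron-server tells nova-compute that the port's VIF was plugged, but the event is only consumed if some code path registered a waiter for it beforehand. During the initial spawn (vm_state building, task_state spawning) no waiter has been registered yet, so the event is logged as unexpected and discarded. A minimal event registry in that spirit (illustrative, not the nova implementation; threading.Event stands in for nova's eventlet-based machinery):

    # Illustrative sketch, not the nova implementation.
    import threading
    from collections import defaultdict

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()        # cf. the "<uuid>-events" lock above
            self._waiters = defaultdict(dict)    # instance uuid -> {event name: Event}

        def prepare_for_event(self, instance_uuid, event_name):
            """Register interest; the caller later blocks on the returned Event."""
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters[instance_uuid].pop(event_name, None)

        def dispatch(self, instance_uuid, event_name):
            """Called when an external event such as network-vif-plugged arrives."""
            ev = self.pop_instance_event(instance_uuid, event_name)
            if ev is None:
                # No waiting events found -> the "Received unexpected event" warning.
                print("Received unexpected event %s for %s" % (event_name, instance_uuid))
            else:
                ev.set()  # wake the registered waiter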
[ 1526.018018] env[62507]: DEBUG nova.compute.manager [req-360556f5-041e-4632-8abf-8cc3380ccde3 req-173bbcae-375c-4006-b7a6-05637d88bbd7 service nova] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Received event network-vif-plugged-430fe608-6304-405b-aa59-86cefe2399c2 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1526.018267] env[62507]: DEBUG oslo_concurrency.lockutils [req-360556f5-041e-4632-8abf-8cc3380ccde3 req-173bbcae-375c-4006-b7a6-05637d88bbd7 service nova] Acquiring lock "637de77e-d142-45ca-8a4e-3bf365e31502-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.018470] env[62507]: DEBUG oslo_concurrency.lockutils [req-360556f5-041e-4632-8abf-8cc3380ccde3 req-173bbcae-375c-4006-b7a6-05637d88bbd7 service nova] Lock "637de77e-d142-45ca-8a4e-3bf365e31502-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.018636] env[62507]: DEBUG oslo_concurrency.lockutils [req-360556f5-041e-4632-8abf-8cc3380ccde3 req-173bbcae-375c-4006-b7a6-05637d88bbd7 service nova] Lock "637de77e-d142-45ca-8a4e-3bf365e31502-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1526.018926] env[62507]: DEBUG nova.compute.manager [req-360556f5-041e-4632-8abf-8cc3380ccde3 req-173bbcae-375c-4006-b7a6-05637d88bbd7 service nova] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] No waiting events found dispatching network-vif-plugged-430fe608-6304-405b-aa59-86cefe2399c2 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1526.018979] env[62507]: WARNING nova.compute.manager [req-360556f5-041e-4632-8abf-8cc3380ccde3 req-173bbcae-375c-4006-b7a6-05637d88bbd7 service nova] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Received unexpected event network-vif-plugged-430fe608-6304-405b-aa59-86cefe2399c2 for instance with vm_state building and task_state spawning.
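Lock names such as "ComputeManager.terminate_instance.<locals>.do_terminate_instance" and "InstanceEvents.pop_instance_event.<locals>._pop_event" come from a recurring idiom: the critical section is an inner function decorated at call time, which lets the lock name embed a per-instance value while oslo.concurrency's synchronized wrapper emits the Acquiring/acquired/"released" lines with the waited and held timings seen throughout this log. A sketch of the idiom, assuming plain oslo.concurrency rather than nova's own utils wrapper:

    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid):
        # Defined inside the enclosing function so the lock name can include the
        # UUID; its qualified name therefore becomes
        # terminate_instance.<locals>.do_terminate_instance, as in the log.
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            # Critical section: lockutils logs 'acquired ... waited Ns' on entry
            # and '"released" ... held Ns' on exit, as in the lines above.
            pass

        do_terminate_instance()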
[ 1526.063682] env[62507]: DEBUG nova.network.neutron [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Successfully updated port: 4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1526.076859] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "refresh_cache-7ff089f8-f304-4c2e-bf3d-16997fe8968c" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.077028] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired lock "refresh_cache-7ff089f8-f304-4c2e-bf3d-16997fe8968c" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.077189] env[62507]: DEBUG nova.network.neutron [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1526.130520] env[62507]: DEBUG nova.network.neutron [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Successfully updated port: 430fe608-6304-405b-aa59-86cefe2399c2 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1526.142088] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "refresh_cache-637de77e-d142-45ca-8a4e-3bf365e31502" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.142376] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquired lock "refresh_cache-637de77e-d142-45ca-8a4e-3bf365e31502" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.142657] env[62507]: DEBUG nova.network.neutron [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1526.144522] env[62507]: DEBUG nova.network.neutron [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1526.190129] env[62507]: DEBUG nova.network.neutron [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1526.367928] env[62507]: DEBUG nova.network.neutron [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Updating instance_info_cache with network_info: [{"id": "4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54", "address": "fa:16:3e:94:71:4a", "network": {"id": "0b8e342b-a3cc-46ec-b49a-d67e3624a14b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1312490043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cb14d09e6f4d84996e4470f4e24eeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae128f6-e6", "ovs_interfaceid": "4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.370010] env[62507]: DEBUG nova.network.neutron [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Updating instance_info_cache with network_info: [{"id": "430fe608-6304-405b-aa59-86cefe2399c2", "address": "fa:16:3e:01:ff:20", "network": {"id": "e1d5d969-2a6a-4fe4-a020-7cfcf85e69ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-524905620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "884c06ca36464159847e4a452154a873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap430fe608-63", "ovs_interfaceid": "430fe608-6304-405b-aa59-86cefe2399c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.385594] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Releasing lock "refresh_cache-7ff089f8-f304-4c2e-bf3d-16997fe8968c" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.385893] env[62507]: DEBUG nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Instance network_info: |[{"id": "4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54", "address": "fa:16:3e:94:71:4a", "network": {"id": "0b8e342b-a3cc-46ec-b49a-d67e3624a14b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1312490043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cb14d09e6f4d84996e4470f4e24eeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae128f6-e6", "ovs_interfaceid": "4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1526.386354] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:71:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ef746c57-cd18-4883-a0e9-c52937aaf41d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1526.393970] env[62507]: DEBUG oslo.service.loopingcall [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1526.394792] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1526.395198] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Releasing lock "refresh_cache-637de77e-d142-45ca-8a4e-3bf365e31502" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.395470] env[62507]: DEBUG nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Instance network_info: |[{"id": "430fe608-6304-405b-aa59-86cefe2399c2", "address": "fa:16:3e:01:ff:20", "network": {"id": "e1d5d969-2a6a-4fe4-a020-7cfcf85e69ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-524905620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "884c06ca36464159847e4a452154a873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap430fe608-63", "ovs_interfaceid": "430fe608-6304-405b-aa59-86cefe2399c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1526.395717] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c10d0286-2ebd-4d5b-af84-974eeb6ffc99 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.410129] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:ff:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '209639b9-c313-4b35-86dc-dccd744d174a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '430fe608-6304-405b-aa59-86cefe2399c2', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1526.417682] env[62507]: DEBUG oslo.service.loopingcall [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1526.418246] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1526.418866] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62d60e38-5a6a-40c4-a849-9faa31e1abd9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.434443] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1526.434443] env[62507]: value = "task-2460072" [ 1526.434443] env[62507]: _type = "Task" [ 1526.434443] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.439449] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1526.439449] env[62507]: value = "task-2460073" [ 1526.439449] env[62507]: _type = "Task" [ 1526.439449] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.445915] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460072, 'name': CreateVM_Task} progress is 6%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.450760] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460073, 'name': CreateVM_Task} progress is 5%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.947167] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460072, 'name': CreateVM_Task, 'duration_secs': 0.34203} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.947167] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1526.947167] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.947378] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.947662] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1526.951108] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e503293-3858-4113-9e10-8a3621bb40f5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.952566] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460073, 'name': CreateVM_Task, 'duration_secs': 0.349457} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.952725] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1526.953583] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.955433] env[62507]: DEBUG oslo_vmware.api [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for the task: (returnval){ [ 1526.955433] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5299f79f-7b76-46a4-aa57-a7b5ea31314e" [ 1526.955433] env[62507]: _type = "Task" [ 1526.955433] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.962667] env[62507]: DEBUG oslo_vmware.api [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5299f79f-7b76-46a4-aa57-a7b5ea31314e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.466914] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1527.466914] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1527.467246] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1527.467246] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1527.467467] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1527.467708] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cee18165-3f05-435c-8e4d-2b188343bb94 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.471821] env[62507]: DEBUG oslo_vmware.api [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Waiting for the task: (returnval){ [ 1527.471821] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5222c525-6dbb-970b-c4cc-b822046b1c94" [ 1527.471821] env[62507]: _type = "Task" [ 1527.471821] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.478671] env[62507]: DEBUG oslo_vmware.api [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5222c525-6dbb-970b-c4cc-b822046b1c94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.982208] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1527.982466] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1527.982681] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.031412] env[62507]: DEBUG nova.compute.manager [req-058c1968-30c0-42f9-8bfd-6f764c5af345 req-b4a30b72-5980-4aa2-9a5b-df9415165a69 service nova] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Received event network-changed-4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1528.031600] env[62507]: DEBUG nova.compute.manager [req-058c1968-30c0-42f9-8bfd-6f764c5af345 req-b4a30b72-5980-4aa2-9a5b-df9415165a69 service nova] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Refreshing instance network info cache due to event network-changed-4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1528.031814] env[62507]: DEBUG oslo_concurrency.lockutils [req-058c1968-30c0-42f9-8bfd-6f764c5af345 req-b4a30b72-5980-4aa2-9a5b-df9415165a69 service nova] Acquiring lock "refresh_cache-7ff089f8-f304-4c2e-bf3d-16997fe8968c" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.031959] env[62507]: DEBUG oslo_concurrency.lockutils [req-058c1968-30c0-42f9-8bfd-6f764c5af345 req-b4a30b72-5980-4aa2-9a5b-df9415165a69 service nova] Acquired lock "refresh_cache-7ff089f8-f304-4c2e-bf3d-16997fe8968c" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.032137] env[62507]: DEBUG nova.network.neutron [req-058c1968-30c0-42f9-8bfd-6f764c5af345 req-b4a30b72-5980-4aa2-9a5b-df9415165a69 service nova] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Refreshing network info cache for port 4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1528.048046] env[62507]: DEBUG nova.compute.manager [req-973a655c-2b0e-459f-8bbb-c917542b066f req-93ef5ad1-2249-40cf-acf7-76b7025a3646 service nova] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Received event network-changed-430fe608-6304-405b-aa59-86cefe2399c2 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1528.048251] env[62507]: DEBUG nova.compute.manager [req-973a655c-2b0e-459f-8bbb-c917542b066f req-93ef5ad1-2249-40cf-acf7-76b7025a3646 service nova] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Refreshing instance network info cache due to event network-changed-430fe608-6304-405b-aa59-86cefe2399c2. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1528.048453] env[62507]: DEBUG oslo_concurrency.lockutils [req-973a655c-2b0e-459f-8bbb-c917542b066f req-93ef5ad1-2249-40cf-acf7-76b7025a3646 service nova] Acquiring lock "refresh_cache-637de77e-d142-45ca-8a4e-3bf365e31502" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.048594] env[62507]: DEBUG oslo_concurrency.lockutils [req-973a655c-2b0e-459f-8bbb-c917542b066f req-93ef5ad1-2249-40cf-acf7-76b7025a3646 service nova] Acquired lock "refresh_cache-637de77e-d142-45ca-8a4e-3bf365e31502" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.048749] env[62507]: DEBUG nova.network.neutron [req-973a655c-2b0e-459f-8bbb-c917542b066f req-93ef5ad1-2249-40cf-acf7-76b7025a3646 service nova] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Refreshing network info cache for port 430fe608-6304-405b-aa59-86cefe2399c2 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1528.373467] env[62507]: DEBUG nova.network.neutron [req-973a655c-2b0e-459f-8bbb-c917542b066f req-93ef5ad1-2249-40cf-acf7-76b7025a3646 service nova] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Updated VIF entry in instance network info cache for port 430fe608-6304-405b-aa59-86cefe2399c2. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1528.373822] env[62507]: DEBUG nova.network.neutron [req-973a655c-2b0e-459f-8bbb-c917542b066f req-93ef5ad1-2249-40cf-acf7-76b7025a3646 service nova] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Updating instance_info_cache with network_info: [{"id": "430fe608-6304-405b-aa59-86cefe2399c2", "address": "fa:16:3e:01:ff:20", "network": {"id": "e1d5d969-2a6a-4fe4-a020-7cfcf85e69ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-524905620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "884c06ca36464159847e4a452154a873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap430fe608-63", "ovs_interfaceid": "430fe608-6304-405b-aa59-86cefe2399c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.384379] env[62507]: DEBUG oslo_concurrency.lockutils [req-973a655c-2b0e-459f-8bbb-c917542b066f req-93ef5ad1-2249-40cf-acf7-76b7025a3646 service nova] Releasing lock "refresh_cache-637de77e-d142-45ca-8a4e-3bf365e31502" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1528.577935] env[62507]: DEBUG nova.network.neutron [req-058c1968-30c0-42f9-8bfd-6f764c5af345 req-b4a30b72-5980-4aa2-9a5b-df9415165a69 service nova] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Updated VIF entry in instance network info cache for port 4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1528.578321] env[62507]: DEBUG nova.network.neutron [req-058c1968-30c0-42f9-8bfd-6f764c5af345 req-b4a30b72-5980-4aa2-9a5b-df9415165a69 service nova] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Updating instance_info_cache with network_info: [{"id": "4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54", "address": "fa:16:3e:94:71:4a", "network": {"id": "0b8e342b-a3cc-46ec-b49a-d67e3624a14b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1312490043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cb14d09e6f4d84996e4470f4e24eeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae128f6-e6", "ovs_interfaceid": "4ae128f6-e6fa-4fe9-bca4-bc4178cdfc54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.588657] env[62507]: DEBUG oslo_concurrency.lockutils [req-058c1968-30c0-42f9-8bfd-6f764c5af345 req-b4a30b72-5980-4aa2-9a5b-df9415165a69 service nova] Releasing lock "refresh_cache-7ff089f8-f304-4c2e-bf3d-16997fe8968c" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.647492] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1531.167687] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1531.168060] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1531.168060] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1531.192849] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1531.193013] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1531.193211] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1531.193312] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1531.193439] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1531.193625] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1531.193713] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1531.194417] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1531.194417] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1531.194417] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1531.194417] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1531.195430] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1532.167771] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1532.167771] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.167654] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.167940] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.168063] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1535.167348] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1536.167986] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1536.179765] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.179967] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.180148] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.180304] env[62507]: DEBUG nova.compute.resource_tracker [None 
req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1536.181435] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0db3c1a-00b2-498e-adba-a690f1b2cf3f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.190349] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d995d68c-c4f5-4091-8d96-a60008373097 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.203943] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5fc0296-cb3b-4538-a89d-8a97b6be6cc2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.209913] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d109a48b-990a-4dd9-84b7-ee6ac9471de7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.239765] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181167MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1536.239916] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.240122] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.312051] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 3627bbf7-507f-4345-b093-3b4f5bb45eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.312051] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d888bcb9-89ef-41aa-b637-e2a15efd0ce8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.312051] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.312051] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f4f51028-a313-4d17-bcf1-4decec2d3c3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.312276] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 65efc608-6573-4690-8d11-2f0459647d70 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.312276] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.312276] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01043570-d72d-4a97-8c51-cfe30b25b82b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.312276] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef5633ea-273d-429f-9a02-326711b73bab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.312408] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7ff089f8-f304-4c2e-bf3d-16997fe8968c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.312408] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 637de77e-d142-45ca-8a4e-3bf365e31502 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.323758] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance e3b2a1ab-a686-4e28-85fd-9608f1cd6430 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1536.335982] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 630d18e9-4769-4141-b0a8-7dd32d853be1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1536.345711] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1536.345711] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1536.345859] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1536.491838] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30226f9-150f-4bcc-bf26-3f54a8314271 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.499560] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403e8b60-ca52-4786-a060-5d002d80ad9c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.529025] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e0abf5-a281-4901-a8cf-fb964b236ba7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.535765] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11152742-aa2c-4c94-a64a-a6b8d9fde1c3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.548374] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1536.556952] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1536.571479] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1536.571659] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.332s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.948908] env[62507]: DEBUG oslo_concurrency.lockutils [None req-10039c40-2b9c-4852-aeb4-44a69990a8c3 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.802535] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f9c99faf-518b-48f2-ba71-bdeb291d25f2 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "637de77e-d142-45ca-8a4e-3bf365e31502" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.003147] env[62507]: WARNING oslo_vmware.rw_handles [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1571.003147] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1571.003147] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1571.003147] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1571.003147] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1571.003147] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1571.003147] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1571.003147] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1571.003147] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1571.003147] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1571.003147] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1571.003147] env[62507]: ERROR oslo_vmware.rw_handles [ 1571.003938] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 
3627bbf7-507f-4345-b093-3b4f5bb45eae] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/f9455e37-acb1-4736-af23-348573406aec/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1571.005955] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1571.009516] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Copying Virtual Disk [datastore2] vmware_temp/f9455e37-acb1-4736-af23-348573406aec/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/f9455e37-acb1-4736-af23-348573406aec/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1571.009874] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70a8bf84-ce85-49df-8fcb-07b2424bff5b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.021248] env[62507]: DEBUG oslo_vmware.api [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for the task: (returnval){ [ 1571.021248] env[62507]: value = "task-2460074" [ 1571.021248] env[62507]: _type = "Task" [ 1571.021248] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.029640] env[62507]: DEBUG oslo_vmware.api [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Task: {'id': task-2460074, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.531807] env[62507]: DEBUG oslo_vmware.exceptions [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1571.532150] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.535103] env[62507]: ERROR nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1571.535103] env[62507]: Faults: ['InvalidArgument'] [ 1571.535103] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Traceback (most recent call last): [ 1571.535103] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1571.535103] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] yield resources [ 1571.535103] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1571.535103] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] self.driver.spawn(context, instance, image_meta, [ 1571.535103] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1571.535103] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1571.535103] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1571.535103] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] self._fetch_image_if_missing(context, vi) [ 1571.535103] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1571.535103] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] image_cache(vi, tmp_image_ds_loc) [ 1571.535538] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1571.535538] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] vm_util.copy_virtual_disk( [ 1571.535538] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1571.535538] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] session._wait_for_task(vmdk_copy_task) [ 1571.535538] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1571.535538] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] return self.wait_for_task(task_ref) [ 1571.535538] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1571.535538] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] return evt.wait() [ 1571.535538] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1571.535538] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] result = hub.switch() [ 1571.535538] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1571.535538] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] return self.greenlet.switch() [ 1571.535538] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1571.535887] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] self.f(*self.args, **self.kw) [ 1571.535887] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1571.535887] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] raise exceptions.translate_fault(task_info.error) [ 1571.535887] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1571.535887] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Faults: ['InvalidArgument'] [ 1571.535887] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] [ 1571.535887] env[62507]: INFO nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Terminating instance [ 1571.535887] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.536328] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1571.537045] env[62507]: DEBUG nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 
3627bbf7-507f-4345-b093-3b4f5bb45eae] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1571.537358] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1571.537811] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f72c6a3-9ff7-47a7-b28a-cd493a7d4eab {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.540526] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578a2a7e-6a54-4f33-9628-2bbc6f4daba9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.549554] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1571.551063] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af85c602-d9bb-470b-bb90-9cbb2a801fd7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.553509] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1571.557183] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1571.557183] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99455d96-5e52-4ddd-9300-8bfaac5d5ab8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.563393] env[62507]: DEBUG oslo_vmware.api [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Waiting for the task: (returnval){ [ 1571.563393] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529bf30b-201a-40ea-941c-813f9651d905" [ 1571.563393] env[62507]: _type = "Task" [ 1571.563393] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.572233] env[62507]: DEBUG oslo_vmware.api [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529bf30b-201a-40ea-941c-813f9651d905, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.620777] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1571.621418] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1571.621652] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Deleting the datastore file [datastore2] 3627bbf7-507f-4345-b093-3b4f5bb45eae {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1571.621926] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee8bb59c-def4-4e98-b364-87a65568620b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.629198] env[62507]: DEBUG oslo_vmware.api [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for the task: (returnval){ [ 1571.629198] env[62507]: value = "task-2460076" [ 1571.629198] env[62507]: _type = "Task" [ 1571.629198] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.637472] env[62507]: DEBUG oslo_vmware.api [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Task: {'id': task-2460076, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.081938] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1572.081938] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Creating directory with path [datastore2] vmware_temp/315a06cf-7531-436d-81a1-a81319649f06/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1572.081938] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da37cc55-5f93-4c86-8feb-891a5bdef3df {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.099804] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Created directory with path [datastore2] vmware_temp/315a06cf-7531-436d-81a1-a81319649f06/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1572.099804] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Fetch image to [datastore2] vmware_temp/315a06cf-7531-436d-81a1-a81319649f06/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1572.099804] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/315a06cf-7531-436d-81a1-a81319649f06/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1572.101182] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789ca809-2f82-41d4-aefb-6dee8b862ad7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.111048] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499dfd69-ff5d-4dad-8932-65cf520e1df9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.120858] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51cdd9c-367f-47d1-9830-ea481d262b35 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.165425] env[62507]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef59f417-938f-48cb-8194-68bbd66f7ef2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.175223] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4fc0d92c-a846-4301-96d9-12c236d4a696 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.176851] env[62507]: DEBUG oslo_vmware.api [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Task: {'id': task-2460076, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081583} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.177179] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1572.177345] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1572.177516] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1572.177765] env[62507]: INFO nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Took 0.64 seconds to destroy the instance on the hypervisor. 
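
[Annotation] The DeleteDatastoreFile_Task sequence above ("progress is 0%" followed by "completed successfully ... duration_secs") is the standard oslo.vmware task pattern: invoke a *_Task method, then poll the returned task object until vSphere reports a terminal state. Below is a schematic sketch of that polling loop; it is an illustrative re-implementation of what wait_for_task()/_poll_task do in the log, not the library's actual code, and poll_vmware_task() plus the 0.5 s interval are assumptions.

import time

from oslo_vmware import vim_util


class TaskFailed(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""


def poll_vmware_task(session, task_ref, interval=0.5):
    # Read the task's 'info' property via the PropertyCollector until the
    # task reaches a terminal state, as wait_for_task() does in the log.
    while True:
        info = session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, task_ref, 'info')
        if info.state == 'success':
            return info
        if info.state == 'error':
            # Translate the vSphere fault, e.g. the InvalidArgument /
            # fileType error that fails the build later in this log.
            raise TaskFailed(info.error.localizedMessage)
        time.sleep(interval)  # 'queued'/'running': wait and poll again
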
[ 1572.179953] env[62507]: DEBUG nova.compute.claims [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1572.180158] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.180379] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.201910] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1572.269434] env[62507]: DEBUG oslo_vmware.rw_handles [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/315a06cf-7531-436d-81a1-a81319649f06/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1572.334719] env[62507]: DEBUG oslo_vmware.rw_handles [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1572.334719] env[62507]: DEBUG oslo_vmware.rw_handles [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/315a06cf-7531-436d-81a1-a81319649f06/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1572.482067] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Acquiring lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.482324] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.488622] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31dba56-c449-4bab-b761-77e0c57ed366 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.496434] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdba73f8-4d91-461a-8c06-ddcfc9bbf1f8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.658346] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ec70a7-ca9d-4f22-8bc6-94207d77586e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.658346] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad56bae-38d3-4ce3-a2bb-dbaa75304a4f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.658346] env[62507]: DEBUG nova.compute.provider_tree [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1572.658346] env[62507]: DEBUG nova.scheduler.client.report [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1572.659244] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.440s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.659244] env[62507]: ERROR nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1572.659244] env[62507]: Faults: ['InvalidArgument'] [ 1572.659244] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Traceback (most recent call last): [ 1572.659244] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1572.659244] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] self.driver.spawn(context, instance, image_meta, [ 1572.659244] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1572.659244] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1572.659244] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1572.659244] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] self._fetch_image_if_missing(context, vi) [ 1572.659519] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1572.659519] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] image_cache(vi, tmp_image_ds_loc) [ 1572.659519] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1572.659519] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] vm_util.copy_virtual_disk( [ 1572.659519] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1572.659519] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] session._wait_for_task(vmdk_copy_task) [ 1572.659519] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1572.659519] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] return self.wait_for_task(task_ref) [ 1572.659519] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1572.659519] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] return evt.wait() [ 1572.659519] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1572.659519] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] result = hub.switch() [ 1572.659519] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1572.659847] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] return self.greenlet.switch() [ 1572.659847] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1572.659847] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] self.f(*self.args, **self.kw) [ 1572.659847] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1572.659847] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] raise exceptions.translate_fault(task_info.error) [ 1572.659847] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1572.659847] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Faults: ['InvalidArgument'] [ 1572.659847] env[62507]: ERROR nova.compute.manager [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] [ 1572.659847] env[62507]: DEBUG nova.compute.utils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1572.659847] env[62507]: DEBUG nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Build of instance 3627bbf7-507f-4345-b093-3b4f5bb45eae was re-scheduled: A specified parameter was not correct: fileType [ 1572.660118] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1572.660118] env[62507]: DEBUG nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1572.660118] env[62507]: DEBUG nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1572.660118] env[62507]: DEBUG nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1572.660118] env[62507]: DEBUG nova.network.neutron [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1573.262898] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Acquiring lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.263328] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.285149] env[62507]: DEBUG nova.network.neutron [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.299619] env[62507]: INFO nova.compute.manager [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Took 0.67 seconds to deallocate network for instance. 
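
[Annotation] The lock acquire/wait/release lines around this failure come from oslo_concurrency.lockutils: build and terminate both serialize on a lock named after the instance UUID, which is why the terminate below reports having waited 383.554s while the failed build held "3627bbf7-..." for 578.766s across the re-schedule. A minimal sketch of that pattern follows; the function names and the fair=True choice are illustrative assumptions, not Nova's actual code.

from oslo_concurrency import lockutils


def build_instance(uuid):
    pass  # placeholder for the real build work


def destroy_instance(uuid):
    pass  # placeholder for the real teardown


def locked_do_build_and_run_instance(instance_uuid):
    # Hold a per-instance lock for the whole build attempt, as
    # _locked_do_build_and_run_instance does in the log.
    with lockutils.lock(instance_uuid, fair=True):
        build_instance(instance_uuid)


def do_terminate_instance(instance_uuid):
    # Blocks here while a build still holds the same lock, producing the
    # long "waited ...s" lines seen above.
    with lockutils.lock(instance_uuid, fair=True):
        destroy_instance(instance_uuid)
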
[ 1573.418243] env[62507]: INFO nova.scheduler.client.report [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Deleted allocations for instance 3627bbf7-507f-4345-b093-3b4f5bb45eae [ 1573.445884] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c2f2cea8-1ab8-4d49-937a-06ae1c161407 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "3627bbf7-507f-4345-b093-3b4f5bb45eae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 578.766s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.447390] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ca923de-c62f-43a0-a3a9-20f68db3e0a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "3627bbf7-507f-4345-b093-3b4f5bb45eae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 383.554s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.447607] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ca923de-c62f-43a0-a3a9-20f68db3e0a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "3627bbf7-507f-4345-b093-3b4f5bb45eae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.447814] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ca923de-c62f-43a0-a3a9-20f68db3e0a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "3627bbf7-507f-4345-b093-3b4f5bb45eae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.448350] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ca923de-c62f-43a0-a3a9-20f68db3e0a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "3627bbf7-507f-4345-b093-3b4f5bb45eae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.451837] env[62507]: INFO nova.compute.manager [None req-7ca923de-c62f-43a0-a3a9-20f68db3e0a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Terminating instance [ 1573.454439] env[62507]: DEBUG nova.compute.manager [None req-7ca923de-c62f-43a0-a3a9-20f68db3e0a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Start destroying the instance on the hypervisor. 
{{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1573.455918] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca923de-c62f-43a0-a3a9-20f68db3e0a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1573.456825] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7f08e96-8ca2-42c5-859f-ee9731ca78b1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.459500] env[62507]: DEBUG nova.compute.manager [None req-ae7776d7-4d7c-44d7-a415-526aee7ef126 tempest-ServersTestMultiNic-508332718 tempest-ServersTestMultiNic-508332718-project-member] [instance: e3b2a1ab-a686-4e28-85fd-9608f1cd6430] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1573.468895] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d43cbfc-9f41-409f-a205-28312d97ea7c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.488373] env[62507]: DEBUG nova.compute.manager [None req-ae7776d7-4d7c-44d7-a415-526aee7ef126 tempest-ServersTestMultiNic-508332718 tempest-ServersTestMultiNic-508332718-project-member] [instance: e3b2a1ab-a686-4e28-85fd-9608f1cd6430] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1573.502745] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-7ca923de-c62f-43a0-a3a9-20f68db3e0a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3627bbf7-507f-4345-b093-3b4f5bb45eae could not be found. [ 1573.503195] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca923de-c62f-43a0-a3a9-20f68db3e0a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1573.503993] env[62507]: INFO nova.compute.manager [None req-7ca923de-c62f-43a0-a3a9-20f68db3e0a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1573.504215] env[62507]: DEBUG oslo.service.loopingcall [None req-7ca923de-c62f-43a0-a3a9-20f68db3e0a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1573.508221] env[62507]: DEBUG nova.compute.manager [-] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1573.508339] env[62507]: DEBUG nova.network.neutron [-] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1573.519263] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ae7776d7-4d7c-44d7-a415-526aee7ef126 tempest-ServersTestMultiNic-508332718 tempest-ServersTestMultiNic-508332718-project-member] Lock "e3b2a1ab-a686-4e28-85fd-9608f1cd6430" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.405s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.530853] env[62507]: DEBUG nova.compute.manager [None req-ef2bb14a-e7bc-407f-9c83-f81122f3de0c tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: 630d18e9-4769-4141-b0a8-7dd32d853be1] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1573.541455] env[62507]: DEBUG nova.network.neutron [-] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.552926] env[62507]: INFO nova.compute.manager [-] [instance: 3627bbf7-507f-4345-b093-3b4f5bb45eae] Took 0.04 seconds to deallocate network for instance. [ 1573.562287] env[62507]: DEBUG nova.compute.manager [None req-ef2bb14a-e7bc-407f-9c83-f81122f3de0c tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: 630d18e9-4769-4141-b0a8-7dd32d853be1] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1573.583514] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ef2bb14a-e7bc-407f-9c83-f81122f3de0c tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Lock "630d18e9-4769-4141-b0a8-7dd32d853be1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.255s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.604845] env[62507]: DEBUG nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1573.655203] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.655203] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.655203] env[62507]: INFO nova.compute.claims [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1573.660164] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ca923de-c62f-43a0-a3a9-20f68db3e0a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "3627bbf7-507f-4345-b093-3b4f5bb45eae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.213s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.843835] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d54fa66-cc77-4bc5-b8af-ff4e12ea61e2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.851546] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a847c57-b61d-485a-9ae4-54b2f65c2288 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.893269] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fe27f1-bd0b-4553-8d5d-8a5805f6f6e7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.899221] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ab3d0d-5e49-4201-9d39-9d9ce97ef264 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.913502] env[62507]: DEBUG nova.compute.provider_tree [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1573.921838] env[62507]: DEBUG nova.scheduler.client.report [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1573.936106] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.283s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.936545] env[62507]: DEBUG nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1573.972896] env[62507]: DEBUG nova.compute.utils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1573.975563] env[62507]: DEBUG nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1573.975563] env[62507]: DEBUG nova.network.neutron [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1573.985647] env[62507]: DEBUG nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1574.033027] env[62507]: DEBUG nova.policy [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a05f77d144740b0a37ca55fe163a511', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '24f3f26978fa490fa5fe8dcd8573c61e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1574.055055] env[62507]: DEBUG nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1574.082970] env[62507]: DEBUG nova.virt.hardware [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1574.083232] env[62507]: DEBUG nova.virt.hardware [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1574.083389] env[62507]: DEBUG nova.virt.hardware [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1574.083581] env[62507]: DEBUG nova.virt.hardware [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1574.083730] env[62507]: DEBUG nova.virt.hardware [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1574.083886] env[62507]: DEBUG nova.virt.hardware [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1574.088880] env[62507]: DEBUG nova.virt.hardware [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1574.088880] env[62507]: DEBUG nova.virt.hardware [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1574.089027] env[62507]: DEBUG nova.virt.hardware [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 
tempest-ImagesTestJSON-1935977095-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1574.089105] env[62507]: DEBUG nova.virt.hardware [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1574.089293] env[62507]: DEBUG nova.virt.hardware [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1574.093563] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83909e2-41d5-4f0a-87e1-d2f69401ef51 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.101757] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006e3690-c2c6-44c2-902c-e5b840898fe1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.482482] env[62507]: DEBUG nova.network.neutron [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Successfully created port: a434b811-7f99-4429-99e4-a2740c0dc710 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1575.150303] env[62507]: DEBUG nova.network.neutron [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Successfully updated port: a434b811-7f99-4429-99e4-a2740c0dc710 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1575.164934] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "refresh_cache-fb7f3a79-bd28-48b9-9a64-db1750b0f716" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.164934] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquired lock "refresh_cache-fb7f3a79-bd28-48b9-9a64-db1750b0f716" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.164934] env[62507]: DEBUG nova.network.neutron [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1575.202553] env[62507]: DEBUG nova.network.neutron [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1575.358308] env[62507]: DEBUG nova.compute.manager [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Received event network-vif-plugged-a434b811-7f99-4429-99e4-a2740c0dc710 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1575.358557] env[62507]: DEBUG oslo_concurrency.lockutils [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] Acquiring lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.358769] env[62507]: DEBUG oslo_concurrency.lockutils [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] Lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.358942] env[62507]: DEBUG oslo_concurrency.lockutils [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] Lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.359126] env[62507]: DEBUG nova.compute.manager [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] No waiting events found dispatching network-vif-plugged-a434b811-7f99-4429-99e4-a2740c0dc710 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1575.359305] env[62507]: WARNING nova.compute.manager [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Received unexpected event network-vif-plugged-a434b811-7f99-4429-99e4-a2740c0dc710 for instance with vm_state building and task_state spawning. [ 1575.359468] env[62507]: DEBUG nova.compute.manager [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Received event network-changed-a434b811-7f99-4429-99e4-a2740c0dc710 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1575.359621] env[62507]: DEBUG nova.compute.manager [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Refreshing instance network info cache due to event network-changed-a434b811-7f99-4429-99e4-a2740c0dc710. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1575.359857] env[62507]: DEBUG oslo_concurrency.lockutils [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] Acquiring lock "refresh_cache-fb7f3a79-bd28-48b9-9a64-db1750b0f716" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.384376] env[62507]: DEBUG nova.network.neutron [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Updating instance_info_cache with network_info: [{"id": "a434b811-7f99-4429-99e4-a2740c0dc710", "address": "fa:16:3e:9c:32:36", "network": {"id": "cd916907-36a3-476b-86b5-4b5b489d0b84", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2065590662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24f3f26978fa490fa5fe8dcd8573c61e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa434b811-7f", "ovs_interfaceid": "a434b811-7f99-4429-99e4-a2740c0dc710", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1575.395139] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Releasing lock "refresh_cache-fb7f3a79-bd28-48b9-9a64-db1750b0f716" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1575.395452] env[62507]: DEBUG nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Instance network_info: |[{"id": "a434b811-7f99-4429-99e4-a2740c0dc710", "address": "fa:16:3e:9c:32:36", "network": {"id": "cd916907-36a3-476b-86b5-4b5b489d0b84", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2065590662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24f3f26978fa490fa5fe8dcd8573c61e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa434b811-7f", 
"ovs_interfaceid": "a434b811-7f99-4429-99e4-a2740c0dc710", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1575.395909] env[62507]: DEBUG oslo_concurrency.lockutils [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] Acquired lock "refresh_cache-fb7f3a79-bd28-48b9-9a64-db1750b0f716" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.395909] env[62507]: DEBUG nova.network.neutron [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Refreshing network info cache for port a434b811-7f99-4429-99e4-a2740c0dc710 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1575.396985] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:32:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d31a554-a94c-4471-892f-f65aa87b8279', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a434b811-7f99-4429-99e4-a2740c0dc710', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1575.404629] env[62507]: DEBUG oslo.service.loopingcall [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1575.405474] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1575.407879] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53dda1bc-4d90-45fd-afcf-0b44b97a93d0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.428548] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1575.428548] env[62507]: value = "task-2460077" [ 1575.428548] env[62507]: _type = "Task" [ 1575.428548] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.437198] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460077, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.762698] env[62507]: DEBUG nova.network.neutron [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Updated VIF entry in instance network info cache for port a434b811-7f99-4429-99e4-a2740c0dc710. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1575.763079] env[62507]: DEBUG nova.network.neutron [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Updating instance_info_cache with network_info: [{"id": "a434b811-7f99-4429-99e4-a2740c0dc710", "address": "fa:16:3e:9c:32:36", "network": {"id": "cd916907-36a3-476b-86b5-4b5b489d0b84", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2065590662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24f3f26978fa490fa5fe8dcd8573c61e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa434b811-7f", "ovs_interfaceid": "a434b811-7f99-4429-99e4-a2740c0dc710", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1575.772345] env[62507]: DEBUG oslo_concurrency.lockutils [req-953754d1-dfdb-4e3c-a156-fc9834302906 req-23c123d3-4015-4fb7-893a-cc452ec5f42e service nova] Releasing lock "refresh_cache-fb7f3a79-bd28-48b9-9a64-db1750b0f716" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1575.938658] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460077, 'name': CreateVM_Task, 'duration_secs': 0.293182} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.938847] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1575.939517] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.939681] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.940056] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1575.940307] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ae3bd67-00b1-416e-9719-a52b274219b4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.944915] env[62507]: DEBUG oslo_vmware.api [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for the task: (returnval){ [ 1575.944915] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52c7f8cb-4ff6-d227-127c-8baced15385f" [ 1575.944915] env[62507]: _type = "Task" [ 1575.944915] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.952590] env[62507]: DEBUG oslo_vmware.api [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52c7f8cb-4ff6-d227-127c-8baced15385f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.455074] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.455295] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1576.455509] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.974936] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a09fcb0c-3e36-4a0b-93fa-38d6679ee4a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.810584] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "1c56d4af-ba43-4141-86d6-880ff384041e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.810990] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "1c56d4af-ba43-4141-86d6-880ff384041e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.567202] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.162923] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.187251] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} 
[ 1592.169064] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1592.169064] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1592.169064] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1592.188380] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1592.188504] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1592.188640] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1592.188771] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1592.188899] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1592.189128] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1592.189309] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1592.189439] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1592.189562] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1592.189682] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1592.189836] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1593.167898] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1593.167898] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1594.167934] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1594.168286] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1595.168277] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1595.168573] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.167600] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.179461] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.179745] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.179881] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.180056] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1596.181167] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a128f0-4419-4240-9095-dbebfda331b4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.189831] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051e09e8-54a0-4a34-9e99-bdcf83876569 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.203387] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a4c2a8-89c9-4d6b-b74d-2b88ac571f06 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.209496] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71702b6-8b4f-4d00-8cf1-5e71e72bfa99 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.236972] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181118MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1596.237125] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.237316] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.305020] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d888bcb9-89ef-41aa-b637-e2a15efd0ce8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1596.305020] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1596.305198] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f4f51028-a313-4d17-bcf1-4decec2d3c3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1596.305198] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 65efc608-6573-4690-8d11-2f0459647d70 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1596.305293] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1596.305415] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01043570-d72d-4a97-8c51-cfe30b25b82b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1596.305534] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef5633ea-273d-429f-9a02-326711b73bab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1596.305649] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7ff089f8-f304-4c2e-bf3d-16997fe8968c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1596.305762] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 637de77e-d142-45ca-8a4e-3bf365e31502 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1596.305874] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1596.315951] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f257db53-3c5f-4dfc-bd45-9f2b27b49401 has been scheduled to this compute host; the scheduler has made an allocation against this compute node, but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1596.325482] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 has been scheduled to this compute host; the scheduler has made an allocation against this compute node, but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1596.334042] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1c56d4af-ba43-4141-86d6-880ff384041e has been scheduled to this compute host; the scheduler has made an allocation against this compute node, but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1596.334251] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1596.334401] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1596.477648] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364c42b9-5b1e-4976-8d58-03518aa37c04 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.485400] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a36c0c-48a1-447b-94da-da16469d7b19 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.514564] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895550c8-d21f-4e83-b8d7-e4b62fe2083f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.521542] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf018b1-3050-4298-b01b-5a3e1cdaa48d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.534526] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider:
40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1596.542802] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1596.557462] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1596.557741] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.320s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.011564] env[62507]: WARNING oslo_vmware.rw_handles [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1618.011564] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1618.011564] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1618.011564] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1618.011564] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1618.011564] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1618.011564] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1618.011564] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1618.011564] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1618.011564] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1618.011564] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1618.011564] env[62507]: ERROR oslo_vmware.rw_handles [ 1618.012207] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/315a06cf-7531-436d-81a1-a81319649f06/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1618.014121] env[62507]: DEBUG 
nova.virt.vmwareapi.vmops [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1618.014379] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Copying Virtual Disk [datastore2] vmware_temp/315a06cf-7531-436d-81a1-a81319649f06/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/315a06cf-7531-436d-81a1-a81319649f06/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1618.014666] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d56a9591-e4ba-4c7f-b107-eb68be050924 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.022026] env[62507]: DEBUG oslo_vmware.api [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Waiting for the task: (returnval){ [ 1618.022026] env[62507]: value = "task-2460078" [ 1618.022026] env[62507]: _type = "Task" [ 1618.022026] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.030081] env[62507]: DEBUG oslo_vmware.api [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Task: {'id': task-2460078, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.533188] env[62507]: DEBUG oslo_vmware.exceptions [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1618.533417] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1618.533953] env[62507]: ERROR nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1618.533953] env[62507]: Faults: ['InvalidArgument'] [ 1618.533953] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Traceback (most recent call last): [ 1618.533953] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1618.533953] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] yield resources [ 1618.533953] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1618.533953] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] self.driver.spawn(context, instance, image_meta, [ 1618.533953] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1618.533953] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1618.533953] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1618.533953] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] self._fetch_image_if_missing(context, vi) [ 1618.533953] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1618.534362] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] image_cache(vi, tmp_image_ds_loc) [ 1618.534362] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1618.534362] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] vm_util.copy_virtual_disk( [ 1618.534362] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1618.534362] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] session._wait_for_task(vmdk_copy_task) [ 1618.534362] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1618.534362] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] return self.wait_for_task(task_ref) [ 1618.534362] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1618.534362] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] return evt.wait() [ 1618.534362] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1618.534362] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] result = hub.switch() [ 1618.534362] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1618.534362] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] return self.greenlet.switch() [ 1618.534770] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1618.534770] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] self.f(*self.args, **self.kw) [ 1618.534770] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1618.534770] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] raise exceptions.translate_fault(task_info.error) [ 1618.534770] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1618.534770] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Faults: ['InvalidArgument'] [ 1618.534770] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] [ 1618.534770] env[62507]: INFO nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Terminating instance [ 1618.535818] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.536036] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1618.536273] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-3e8a9476-f082-4dfb-941a-eab6ab2a9d11 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.538330] env[62507]: DEBUG nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1618.538531] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1618.539230] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82365ca8-7fce-4259-b6a6-534f50a3b3a5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.546180] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1618.546381] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65ed2e9d-69ca-4589-849c-ce223368a651 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.548389] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1618.548565] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1618.549486] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb7a7f1e-9f57-4739-b52f-a2b0ce66df98 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.553931] env[62507]: DEBUG oslo_vmware.api [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Waiting for the task: (returnval){ [ 1618.553931] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]523cbdc8-c135-d41b-6a66-3736c7a54cc9" [ 1618.553931] env[62507]: _type = "Task" [ 1618.553931] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.560695] env[62507]: DEBUG oslo_vmware.api [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]523cbdc8-c135-d41b-6a66-3736c7a54cc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.614550] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1618.614872] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1618.615157] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Deleting the datastore file [datastore2] d888bcb9-89ef-41aa-b637-e2a15efd0ce8 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1618.615518] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0766bf2c-237f-432a-8f97-3333dee7073b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.622403] env[62507]: DEBUG oslo_vmware.api [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Waiting for the task: (returnval){ [ 1618.622403] env[62507]: value = "task-2460080" [ 1618.622403] env[62507]: _type = "Task" [ 1618.622403] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.629845] env[62507]: DEBUG oslo_vmware.api [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Task: {'id': task-2460080, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.063829] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1619.064135] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Creating directory with path [datastore2] vmware_temp/e9afde86-8cca-49a9-88eb-a5ce73f698aa/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1619.064346] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-976f6fca-b592-4d93-a4b2-7611ea95eaf3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.075144] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Created directory with path [datastore2] vmware_temp/e9afde86-8cca-49a9-88eb-a5ce73f698aa/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1619.075350] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Fetch image to [datastore2] vmware_temp/e9afde86-8cca-49a9-88eb-a5ce73f698aa/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1619.075522] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/e9afde86-8cca-49a9-88eb-a5ce73f698aa/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1619.076246] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c3f078-a54d-4696-bf19-44caf76cc4f5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.082732] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3063babf-39ae-4e93-a856-ceb076cc88d7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.091861] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff134e8-e59c-407b-aca2-14d9a0305023 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.121488] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-231ceffd-5aa7-4f5a-be92-fe1e35bd9d75 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.132365] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c3d17986-26e3-4b19-9222-593c1a548ee6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.134017] env[62507]: DEBUG oslo_vmware.api [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Task: {'id': task-2460080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079532} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.134260] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1619.134439] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1619.134610] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1619.134784] env[62507]: INFO nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1619.136832] env[62507]: DEBUG nova.compute.claims [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1619.137034] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.137261] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.156572] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1619.211118] env[62507]: DEBUG oslo_vmware.rw_handles [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9afde86-8cca-49a9-88eb-a5ce73f698aa/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1619.273258] env[62507]: DEBUG oslo_vmware.rw_handles [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1619.273456] env[62507]: DEBUG oslo_vmware.rw_handles [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9afde86-8cca-49a9-88eb-a5ce73f698aa/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1619.383424] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b0a248-dcc8-43b9-9c2b-6e0359e0d4b9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.390894] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22770e93-3544-440b-8398-bdeaf350d587 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.420510] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687a1d43-bf4b-45c6-a557-66afd5678df8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.427057] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a33540-4325-4206-9e01-ef5664f2ca6c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.439380] env[62507]: DEBUG nova.compute.provider_tree [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1619.447846] env[62507]: DEBUG nova.scheduler.client.report [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1619.460857] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.324s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.461374] env[62507]: ERROR nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1619.461374] env[62507]: Faults: ['InvalidArgument'] [ 1619.461374] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Traceback (most recent call last): [ 1619.461374] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, 
in _build_and_run_instance [ 1619.461374] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] self.driver.spawn(context, instance, image_meta, [ 1619.461374] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1619.461374] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1619.461374] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1619.461374] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] self._fetch_image_if_missing(context, vi) [ 1619.461374] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1619.461374] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] image_cache(vi, tmp_image_ds_loc) [ 1619.461374] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1619.461705] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] vm_util.copy_virtual_disk( [ 1619.461705] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1619.461705] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] session._wait_for_task(vmdk_copy_task) [ 1619.461705] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1619.461705] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] return self.wait_for_task(task_ref) [ 1619.461705] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1619.461705] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] return evt.wait() [ 1619.461705] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1619.461705] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] result = hub.switch() [ 1619.461705] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1619.461705] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] return self.greenlet.switch() [ 1619.461705] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1619.461705] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] self.f(*self.args, **self.kw) [ 1619.462029] env[62507]: ERROR nova.compute.manager [instance: 
d888bcb9-89ef-41aa-b637-e2a15efd0ce8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1619.462029] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] raise exceptions.translate_fault(task_info.error) [ 1619.462029] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1619.462029] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Faults: ['InvalidArgument'] [ 1619.462029] env[62507]: ERROR nova.compute.manager [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] [ 1619.462029] env[62507]: DEBUG nova.compute.utils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1619.463339] env[62507]: DEBUG nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Build of instance d888bcb9-89ef-41aa-b637-e2a15efd0ce8 was re-scheduled: A specified parameter was not correct: fileType [ 1619.463339] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1619.463699] env[62507]: DEBUG nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1619.463870] env[62507]: DEBUG nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1619.464055] env[62507]: DEBUG nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1619.464227] env[62507]: DEBUG nova.network.neutron [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1619.758450] env[62507]: DEBUG nova.network.neutron [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.772344] env[62507]: INFO nova.compute.manager [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Took 0.31 seconds to deallocate network for instance. [ 1619.866029] env[62507]: INFO nova.scheduler.client.report [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Deleted allocations for instance d888bcb9-89ef-41aa-b637-e2a15efd0ce8 [ 1619.890178] env[62507]: DEBUG oslo_concurrency.lockutils [None req-aca30aa9-cec6-4866-9a88-d18190dff701 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Lock "d888bcb9-89ef-41aa-b637-e2a15efd0ce8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 573.565s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.891462] env[62507]: DEBUG oslo_concurrency.lockutils [None req-83d7f402-a600-4243-92c0-228ecb95fa53 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Lock "d888bcb9-89ef-41aa-b637-e2a15efd0ce8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 377.807s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.891630] env[62507]: DEBUG oslo_concurrency.lockutils [None req-83d7f402-a600-4243-92c0-228ecb95fa53 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Acquiring lock "d888bcb9-89ef-41aa-b637-e2a15efd0ce8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.891883] env[62507]: DEBUG oslo_concurrency.lockutils [None req-83d7f402-a600-4243-92c0-228ecb95fa53 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Lock 
"d888bcb9-89ef-41aa-b637-e2a15efd0ce8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.892039] env[62507]: DEBUG oslo_concurrency.lockutils [None req-83d7f402-a600-4243-92c0-228ecb95fa53 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Lock "d888bcb9-89ef-41aa-b637-e2a15efd0ce8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.894511] env[62507]: INFO nova.compute.manager [None req-83d7f402-a600-4243-92c0-228ecb95fa53 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Terminating instance [ 1619.895843] env[62507]: DEBUG nova.compute.manager [None req-83d7f402-a600-4243-92c0-228ecb95fa53 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1619.896046] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-83d7f402-a600-4243-92c0-228ecb95fa53 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1619.896528] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca430b4f-4067-44e6-9f60-198018481b25 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.901968] env[62507]: DEBUG nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1619.908935] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c55f2e-d788-4a00-81c0-45caf1ab7900 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.939307] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-83d7f402-a600-4243-92c0-228ecb95fa53 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d888bcb9-89ef-41aa-b637-e2a15efd0ce8 could not be found. 
[ 1619.939526] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-83d7f402-a600-4243-92c0-228ecb95fa53 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1619.939705] env[62507]: INFO nova.compute.manager [None req-83d7f402-a600-4243-92c0-228ecb95fa53 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1619.939956] env[62507]: DEBUG oslo.service.loopingcall [None req-83d7f402-a600-4243-92c0-228ecb95fa53 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1619.942348] env[62507]: DEBUG nova.compute.manager [-] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1619.942450] env[62507]: DEBUG nova.network.neutron [-] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1619.955946] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.956198] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.957661] env[62507]: INFO nova.compute.claims [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1619.966777] env[62507]: DEBUG nova.network.neutron [-] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.977276] env[62507]: INFO nova.compute.manager [-] [instance: d888bcb9-89ef-41aa-b637-e2a15efd0ce8] Took 0.03 seconds to deallocate network for instance. 
[ 1620.066652] env[62507]: DEBUG oslo_concurrency.lockutils [None req-83d7f402-a600-4243-92c0-228ecb95fa53 tempest-ServerMetadataNegativeTestJSON-199346792 tempest-ServerMetadataNegativeTestJSON-199346792-project-member] Lock "d888bcb9-89ef-41aa-b637-e2a15efd0ce8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.175s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.165791] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feac845c-b4ce-4a31-a3fa-74e549ae4306 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.173832] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22562b57-f036-4f76-a617-21c32d7abc23 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.204652] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b328e3d3-c639-4701-be92-cacad4b4f4fa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.212069] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7012714-96ce-429e-91be-d47e168fe9a7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.225324] env[62507]: DEBUG nova.compute.provider_tree [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1620.233904] env[62507]: DEBUG nova.scheduler.client.report [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1620.248048] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.292s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.248346] env[62507]: DEBUG nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Start building networks asynchronously for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1620.286644] env[62507]: DEBUG nova.compute.utils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1620.287966] env[62507]: DEBUG nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1620.288152] env[62507]: DEBUG nova.network.neutron [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1620.298078] env[62507]: DEBUG nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1620.344711] env[62507]: DEBUG nova.policy [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e850f0777d094bb7beb1737396c2d595', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7be8070792ae4d6d8a72e7dd81ef0d4d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1620.361293] env[62507]: DEBUG nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1620.387527] env[62507]: DEBUG nova.virt.hardware [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1620.387775] env[62507]: DEBUG nova.virt.hardware [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1620.387956] env[62507]: DEBUG nova.virt.hardware [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1620.388175] env[62507]: DEBUG nova.virt.hardware [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1620.388330] env[62507]: DEBUG nova.virt.hardware [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1620.388483] env[62507]: DEBUG nova.virt.hardware [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1620.388696] env[62507]: DEBUG nova.virt.hardware [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1620.388858] env[62507]: DEBUG nova.virt.hardware [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
[ 1620.389040] env[62507]: DEBUG nova.virt.hardware [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1620.389216] env[62507]: DEBUG nova.virt.hardware [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1620.389393] env[62507]: DEBUG nova.virt.hardware [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1620.390288] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96c61ee-44c2-41c5-9e65-8db39e0757b4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.397889] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357b7adb-dd05-46bb-be12-712c219087e1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.779784] env[62507]: DEBUG nova.network.neutron [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Successfully created port: 9a973e9f-aa50-424b-9bf9-0f28012b06f3 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1621.704676] env[62507]: DEBUG nova.network.neutron [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Successfully updated port: 9a973e9f-aa50-424b-9bf9-0f28012b06f3 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1621.718161] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Acquiring lock "refresh_cache-f257db53-3c5f-4dfc-bd45-9f2b27b49401" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.718161] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Acquired lock "refresh_cache-f257db53-3c5f-4dfc-bd45-9f2b27b49401" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.718161] env[62507]: DEBUG nova.network.neutron [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1621.753220] 
env[62507]: DEBUG nova.network.neutron [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1621.794797] env[62507]: DEBUG nova.compute.manager [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Received event network-vif-plugged-9a973e9f-aa50-424b-9bf9-0f28012b06f3 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1621.795031] env[62507]: DEBUG oslo_concurrency.lockutils [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] Acquiring lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.795299] env[62507]: DEBUG oslo_concurrency.lockutils [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] Lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.795470] env[62507]: DEBUG oslo_concurrency.lockutils [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] Lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.795560] env[62507]: DEBUG nova.compute.manager [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] No waiting events found dispatching network-vif-plugged-9a973e9f-aa50-424b-9bf9-0f28012b06f3 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1621.795739] env[62507]: WARNING nova.compute.manager [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Received unexpected event network-vif-plugged-9a973e9f-aa50-424b-9bf9-0f28012b06f3 for instance with vm_state building and task_state spawning. [ 1621.795873] env[62507]: DEBUG nova.compute.manager [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Received event network-changed-9a973e9f-aa50-424b-9bf9-0f28012b06f3 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1621.796040] env[62507]: DEBUG nova.compute.manager [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Refreshing instance network info cache due to event network-changed-9a973e9f-aa50-424b-9bf9-0f28012b06f3. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1621.796212] env[62507]: DEBUG oslo_concurrency.lockutils [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] Acquiring lock "refresh_cache-f257db53-3c5f-4dfc-bd45-9f2b27b49401" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.913532] env[62507]: DEBUG nova.network.neutron [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Updating instance_info_cache with network_info: [{"id": "9a973e9f-aa50-424b-9bf9-0f28012b06f3", "address": "fa:16:3e:de:8d:e7", "network": {"id": "869e81b6-fb79-438c-afa2-fccafc89f1af", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-259391027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7be8070792ae4d6d8a72e7dd81ef0d4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a973e9f-aa", "ovs_interfaceid": "9a973e9f-aa50-424b-9bf9-0f28012b06f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.923773] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Releasing lock "refresh_cache-f257db53-3c5f-4dfc-bd45-9f2b27b49401" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.924073] env[62507]: DEBUG nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Instance network_info: |[{"id": "9a973e9f-aa50-424b-9bf9-0f28012b06f3", "address": "fa:16:3e:de:8d:e7", "network": {"id": "869e81b6-fb79-438c-afa2-fccafc89f1af", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-259391027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7be8070792ae4d6d8a72e7dd81ef0d4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", 
"segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a973e9f-aa", "ovs_interfaceid": "9a973e9f-aa50-424b-9bf9-0f28012b06f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1621.924373] env[62507]: DEBUG oslo_concurrency.lockutils [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] Acquired lock "refresh_cache-f257db53-3c5f-4dfc-bd45-9f2b27b49401" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.924554] env[62507]: DEBUG nova.network.neutron [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Refreshing network info cache for port 9a973e9f-aa50-424b-9bf9-0f28012b06f3 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1621.925909] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:8d:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c68b7663-4f0e-47f0-ac7f-40c6d952f7bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a973e9f-aa50-424b-9bf9-0f28012b06f3', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1621.933015] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Creating folder: Project (7be8070792ae4d6d8a72e7dd81ef0d4d). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1621.934065] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4209e8c-0be4-45e7-bca6-fdc90df39970 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.947517] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Created folder: Project (7be8070792ae4d6d8a72e7dd81ef0d4d) in parent group-v497991. [ 1621.947702] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Creating folder: Instances. Parent ref: group-v498087. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1621.947972] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50d089b8-cb2a-4e8d-9888-bd0356b09e00 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.956799] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Created folder: Instances in parent group-v498087. 
[ 1621.957029] env[62507]: DEBUG oslo.service.loopingcall [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1621.957212] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1621.957409] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8df7dd95-5a68-4216-8042-81cfbdafc780 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.981317] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1621.981317] env[62507]: value = "task-2460083" [ 1621.981317] env[62507]: _type = "Task" [ 1621.981317] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.988576] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460083, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.276289] env[62507]: DEBUG nova.network.neutron [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Updated VIF entry in instance network info cache for port 9a973e9f-aa50-424b-9bf9-0f28012b06f3. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1622.276637] env[62507]: DEBUG nova.network.neutron [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Updating instance_info_cache with network_info: [{"id": "9a973e9f-aa50-424b-9bf9-0f28012b06f3", "address": "fa:16:3e:de:8d:e7", "network": {"id": "869e81b6-fb79-438c-afa2-fccafc89f1af", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-259391027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7be8070792ae4d6d8a72e7dd81ef0d4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a973e9f-aa", "ovs_interfaceid": "9a973e9f-aa50-424b-9bf9-0f28012b06f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.286515] env[62507]: DEBUG oslo_concurrency.lockutils [req-a1f971e6-35f1-415f-9cb1-37acfcfe454d req-61dcaba2-7447-47f3-b3b0-d803100c241f service nova] Releasing lock "refresh_cache-f257db53-3c5f-4dfc-bd45-9f2b27b49401" {{(pid=62507) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.491581] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460083, 'name': CreateVM_Task, 'duration_secs': 0.30067} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.491737] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1622.492415] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.492585] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.492902] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1622.493163] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-856441d9-a9e3-4a76-9528-559f92c9d2a5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.497618] env[62507]: DEBUG oslo_vmware.api [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Waiting for the task: (returnval){ [ 1622.497618] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5265faf6-0798-4247-04b9-22dce41613b6" [ 1622.497618] env[62507]: _type = "Task" [ 1622.497618] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.504985] env[62507]: DEBUG oslo_vmware.api [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5265faf6-0798-4247-04b9-22dce41613b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.008099] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.008099] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1623.008099] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1652.552951] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1653.168409] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1653.168594] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1653.168719] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1653.191300] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1653.191469] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1653.191579] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1653.191709] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1653.191833] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1653.191958] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1653.192095] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1653.192220] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1653.192346] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1653.192463] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1653.192584] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1653.193436] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1654.168590] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1655.167922] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1655.168328] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1656.167598] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1656.167889] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1657.168610] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1658.169072] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1658.180672] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.180884] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.181062] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.181230] env[62507]: DEBUG nova.compute.resource_tracker [None 
req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1658.182366] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16204a5b-7a25-4eee-b2a8-a94d69164ab0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.191063] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b538e3-9ce4-4477-8915-e53628050a37 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.204637] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3975d5c7-e92c-4938-9191-f867b2c3edc6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.210534] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6791fac6-22a1-41e6-ad06-4b2d887d4d25 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.240298] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181179MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1658.240447] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.240634] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.320706] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1658.320860] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f4f51028-a313-4d17-bcf1-4decec2d3c3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1658.320989] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 65efc608-6573-4690-8d11-2f0459647d70 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1658.321127] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1658.321253] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01043570-d72d-4a97-8c51-cfe30b25b82b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1658.321373] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef5633ea-273d-429f-9a02-326711b73bab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1658.321491] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7ff089f8-f304-4c2e-bf3d-16997fe8968c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1658.321607] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 637de77e-d142-45ca-8a4e-3bf365e31502 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1658.321722] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1658.321835] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f257db53-3c5f-4dfc-bd45-9f2b27b49401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1658.332354] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1658.342172] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1c56d4af-ba43-4141-86d6-880ff384041e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1658.342383] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1658.342556] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1658.476183] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0de5cc2-7d2b-4940-956c-6b95ad1bf7c2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.483642] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c73d6ba-95cd-4034-91cb-28b1ae3eb7b0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.512225] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e166823-859b-4ba8-baa0-c84cdb36ea39 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.519094] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e7d50d-f523-4f5c-abce-279ea8653034 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.533505] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1658.541510] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1658.554560] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1658.554779] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.314s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.028104] env[62507]: WARNING oslo_vmware.rw_handles [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1668.028104] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1668.028104] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1668.028104] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1668.028104] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1668.028104] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1668.028104] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1668.028104] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1668.028104] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1668.028104] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1668.028104] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1668.028104] env[62507]: ERROR oslo_vmware.rw_handles [ 1668.029439] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/e9afde86-8cca-49a9-88eb-a5ce73f698aa/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1668.030668] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1668.030911] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Copying Virtual Disk [datastore2] vmware_temp/e9afde86-8cca-49a9-88eb-a5ce73f698aa/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/e9afde86-8cca-49a9-88eb-a5ce73f698aa/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1668.031206] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-41e9fdbd-152d-4847-ae57-bdfbddfa81fe {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.038773] env[62507]: DEBUG oslo_vmware.api [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Waiting for the task: (returnval){ [ 1668.038773] env[62507]: value = "task-2460084" [ 1668.038773] env[62507]: _type = "Task" [ 1668.038773] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.046374] env[62507]: DEBUG oslo_vmware.api [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Task: {'id': task-2460084, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.549497] env[62507]: DEBUG oslo_vmware.exceptions [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1668.549789] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.550420] env[62507]: ERROR nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1668.550420] env[62507]: Faults: ['InvalidArgument'] [ 1668.550420] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Traceback (most recent call last): [ 1668.550420] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1668.550420] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] yield resources [ 1668.550420] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1668.550420] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] self.driver.spawn(context, instance, image_meta, [ 1668.550420] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1668.550420] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1668.550420] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 
1668.550420] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] self._fetch_image_if_missing(context, vi) [ 1668.550420] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1668.550786] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] image_cache(vi, tmp_image_ds_loc) [ 1668.550786] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1668.550786] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] vm_util.copy_virtual_disk( [ 1668.550786] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1668.550786] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] session._wait_for_task(vmdk_copy_task) [ 1668.550786] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1668.550786] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] return self.wait_for_task(task_ref) [ 1668.550786] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1668.550786] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] return evt.wait() [ 1668.550786] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1668.550786] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] result = hub.switch() [ 1668.550786] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1668.550786] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] return self.greenlet.switch() [ 1668.551413] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1668.551413] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] self.f(*self.args, **self.kw) [ 1668.551413] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1668.551413] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] raise exceptions.translate_fault(task_info.error) [ 1668.551413] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1668.551413] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Faults: ['InvalidArgument'] [ 1668.551413] env[62507]: ERROR nova.compute.manager [instance: 
8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] [ 1668.551413] env[62507]: INFO nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Terminating instance [ 1668.552419] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.552633] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1668.552871] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df438c6d-5b56-4176-8e44-6f7709327958 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.556032] env[62507]: DEBUG nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1668.556032] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1668.556261] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d186a5-1efb-43a1-bce9-45876764656b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.563152] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1668.563373] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f16e1bf-9fdf-465d-9be7-790996dce1ef {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.566650] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1668.566650] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 
tempest-ListServerFiltersTestJSON-1253904850-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1668.566650] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45870403-0709-455d-8a90-e0f26d73ee92 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.572273] env[62507]: DEBUG oslo_vmware.api [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Waiting for the task: (returnval){ [ 1668.572273] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52020a94-dedb-625c-eb42-243182476328" [ 1668.572273] env[62507]: _type = "Task" [ 1668.572273] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.578237] env[62507]: DEBUG oslo_vmware.api [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52020a94-dedb-625c-eb42-243182476328, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.626980] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1668.627292] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1668.627521] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Deleting the datastore file [datastore2] 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1668.627829] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-90d3d31e-f462-4670-8f6a-cb77f611f319 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.634572] env[62507]: DEBUG oslo_vmware.api [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Waiting for the task: (returnval){ [ 1668.634572] env[62507]: value = "task-2460086" [ 1668.634572] env[62507]: _type = "Task" [ 1668.634572] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.642227] env[62507]: DEBUG oslo_vmware.api [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Task: {'id': task-2460086, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.081532] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1669.081911] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Creating directory with path [datastore2] vmware_temp/dfe487b4-996d-4412-aa3c-6246c72d113b/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1669.082039] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52569852-d3ea-4cc8-b7eb-41af6227ae85 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.095556] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Created directory with path [datastore2] vmware_temp/dfe487b4-996d-4412-aa3c-6246c72d113b/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1669.095754] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Fetch image to [datastore2] vmware_temp/dfe487b4-996d-4412-aa3c-6246c72d113b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1669.095934] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/dfe487b4-996d-4412-aa3c-6246c72d113b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1669.096671] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd856c2-7c90-4845-9ab1-877cfabda250 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.103209] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ea6d3a-d514-42c1-8194-36bb94c958e6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.111789] 
env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2e8e9b-e01d-4128-9a3e-c159745c0cec {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.144572] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b498dd-4bf0-4bf8-bb31-b2912c20c66c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.151255] env[62507]: DEBUG oslo_vmware.api [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Task: {'id': task-2460086, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084413} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.152593] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1669.152784] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1669.152956] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1669.153148] env[62507]: INFO nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Took 0.60 seconds to destroy the instance on the hypervisor. 
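The records above trace the vSphere task protocol this whole log revolves around: a *_Task method is invoked (CopyVirtualDisk_Task, DeleteDatastoreFile_Task), the session logs "Waiting for the task", and _poll_task reports progress until the task either completes ('duration_secs': 0.084413 for task-2460086) or carries a server-side fault that is re-raised on the client, as in the InvalidArgument traceback earlier. A minimal sketch of that poll loop, assuming only a get_task_info callable returning TaskInfo-like objects; poll_vim_task and VimFault are illustrative names, not oslo.vmware's actual API:

    import time

    class VimFault(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def poll_vim_task(get_task_info, task_ref, interval=0.5):
        # get_task_info(task_ref) is assumed to return an object with
        # .state in {'queued', 'running', 'success', 'error'}, plus
        # .progress and .error, mirroring the vSphere TaskInfo that
        # the log above polls.
        while True:
            info = get_task_info(task_ref)
            if info.state in ('queued', 'running'):
                print("Task: %s progress is %s%%" % (task_ref, info.progress or 0))
                time.sleep(interval)
                continue
            if info.state == 'success':
                return info
            # The server stores the fault on the task and the client
            # raises it locally, which is why the CopyVirtualDisk stack
            # trace ends in translate_fault(task_info.error).
            raise VimFault(info.error)

The point of the pattern is that long-running vCenter operations fail asynchronously: the call that starts the disk copy succeeds, and the InvalidArgument fault only surfaces on a later poll.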
[ 1669.154880] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-81aa188f-150f-4cf2-9061-1c9a40a01266 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.156661] env[62507]: DEBUG nova.compute.claims [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1669.156838] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.157060] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.179861] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1669.229554] env[62507]: DEBUG oslo_vmware.rw_handles [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dfe487b4-996d-4412-aa3c-6246c72d113b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1669.291767] env[62507]: DEBUG oslo_vmware.rw_handles [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1669.291767] env[62507]: DEBUG oslo_vmware.rw_handles [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dfe487b4-996d-4412-aa3c-6246c72d113b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1669.388722] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70532fd-dedc-4288-81e2-5eb29b65fd52 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.396146] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b452bcd-ef64-4f60-9543-2a5a03a79255 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.424505] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5312cc39-5d64-4b8b-9e21-c34ee95f710f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.430903] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e0afa2-15f5-41df-b06f-2a6d67f7371e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.443996] env[62507]: DEBUG nova.compute.provider_tree [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1669.452280] env[62507]: DEBUG nova.scheduler.client.report [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1669.468525] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.311s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.469039] env[62507]: ERROR nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1669.469039] env[62507]: Faults: ['InvalidArgument'] [ 1669.469039] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Traceback (most recent call last): [ 1669.469039] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 
1669.469039] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] self.driver.spawn(context, instance, image_meta, [ 1669.469039] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1669.469039] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1669.469039] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1669.469039] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] self._fetch_image_if_missing(context, vi) [ 1669.469039] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1669.469039] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] image_cache(vi, tmp_image_ds_loc) [ 1669.469039] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1669.469427] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] vm_util.copy_virtual_disk( [ 1669.469427] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1669.469427] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] session._wait_for_task(vmdk_copy_task) [ 1669.469427] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1669.469427] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] return self.wait_for_task(task_ref) [ 1669.469427] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1669.469427] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] return evt.wait() [ 1669.469427] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1669.469427] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] result = hub.switch() [ 1669.469427] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1669.469427] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] return self.greenlet.switch() [ 1669.469427] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1669.469427] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] self.f(*self.args, **self.kw) [ 1669.469808] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1669.469808] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] raise exceptions.translate_fault(task_info.error) [ 1669.469808] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1669.469808] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Faults: ['InvalidArgument'] [ 1669.469808] env[62507]: ERROR nova.compute.manager [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] [ 1669.469808] env[62507]: DEBUG nova.compute.utils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1669.471051] env[62507]: DEBUG nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Build of instance 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 was re-scheduled: A specified parameter was not correct: fileType [ 1669.471051] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1669.471448] env[62507]: DEBUG nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1669.471635] env[62507]: DEBUG nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1669.471809] env[62507]: DEBUG nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1669.471970] env[62507]: DEBUG nova.network.neutron [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1669.774220] env[62507]: DEBUG nova.network.neutron [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.786409] env[62507]: INFO nova.compute.manager [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Took 0.31 seconds to deallocate network for instance. [ 1669.882694] env[62507]: INFO nova.scheduler.client.report [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Deleted allocations for instance 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 [ 1669.907297] env[62507]: DEBUG oslo_concurrency.lockutils [None req-753119d9-536d-475b-bd75-a549b5b6ceb0 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "8e22d586-0ab8-4968-b0d1-2ef1cd8c0249" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 582.824s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.908494] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8f7086b2-f9ff-4d3b-88ad-8c98be1e4d95 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "8e22d586-0ab8-4968-b0d1-2ef1cd8c0249" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 189.088s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.908713] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8f7086b2-f9ff-4d3b-88ad-8c98be1e4d95 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "8e22d586-0ab8-4968-b0d1-2ef1cd8c0249-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.908926] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8f7086b2-f9ff-4d3b-88ad-8c98be1e4d95 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "8e22d586-0ab8-4968-b0d1-2ef1cd8c0249-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.909116] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8f7086b2-f9ff-4d3b-88ad-8c98be1e4d95 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "8e22d586-0ab8-4968-b0d1-2ef1cd8c0249-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.911312] env[62507]: INFO nova.compute.manager [None req-8f7086b2-f9ff-4d3b-88ad-8c98be1e4d95 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Terminating instance [ 1669.913504] env[62507]: DEBUG nova.compute.manager [None req-8f7086b2-f9ff-4d3b-88ad-8c98be1e4d95 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1669.913708] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8f7086b2-f9ff-4d3b-88ad-8c98be1e4d95 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1669.914243] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a5123ea-a132-438b-a03e-6d59f80e6caf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.924146] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211e5764-bf58-4e86-a90b-a1a738efea5d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.934743] env[62507]: DEBUG nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1669.955489] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-8f7086b2-f9ff-4d3b-88ad-8c98be1e4d95 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249 could not be found. 
[ 1669.955695] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8f7086b2-f9ff-4d3b-88ad-8c98be1e4d95 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1669.955870] env[62507]: INFO nova.compute.manager [None req-8f7086b2-f9ff-4d3b-88ad-8c98be1e4d95 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1669.956127] env[62507]: DEBUG oslo.service.loopingcall [None req-8f7086b2-f9ff-4d3b-88ad-8c98be1e4d95 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1669.956345] env[62507]: DEBUG nova.compute.manager [-] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1669.956441] env[62507]: DEBUG nova.network.neutron [-] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1669.980808] env[62507]: DEBUG nova.network.neutron [-] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.983487] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.983487] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.984754] env[62507]: INFO nova.compute.claims [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1669.991621] env[62507]: INFO nova.compute.manager [-] [instance: 8e22d586-0ab8-4968-b0d1-2ef1cd8c0249] Took 0.03 seconds to deallocate network for instance.
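Every resource-accounting step in these records (instance_claim here, abort_instance_claim at 1669.156, _update_available_resource at 1658.554) serializes on the same named "compute_resources" lock, and the "acquired ... waited" / "released ... held" bookkeeping is what oslo.concurrency emits when callables are wrapped with its synchronized decorator. A minimal sketch of that usage pattern, assuming oslo.concurrency is installed; TrackerSketch and its method bodies are illustrative, not Nova's ResourceTracker:

    from oslo_concurrency import lockutils

    class TrackerSketch:
        @lockutils.synchronized('compute_resources')
        def instance_claim(self, instance):
            # Runs with the named lock held; a concurrent abort on
            # another request logs 'waited N s' until this releases.
            return {'claimed': instance}

        @lockutils.synchronized('compute_resources')
        def abort_instance_claim(self, instance):
            return {'aborted': instance}

Serializing claims and aborts on one lock is what keeps the "Final resource view" totals (used_ram, used_vcpus) consistent even while builds fail and reschedule concurrently.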
[ 1670.083425] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8f7086b2-f9ff-4d3b-88ad-8c98be1e4d95 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "8e22d586-0ab8-4968-b0d1-2ef1cd8c0249" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.175s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.162310] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09e9e11-8a4c-4616-ad81-23eb852acd1e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.170430] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c18d0ad-be52-48d6-9272-7d261f6b9f5d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.200173] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2eae45-c9d5-41ae-a17d-afbab4a18286 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.206831] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b2a559-ab60-476c-87c1-da0e8a9166e6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.219264] env[62507]: DEBUG nova.compute.provider_tree [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1670.229175] env[62507]: DEBUG nova.scheduler.client.report [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1670.241474] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.258s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.241932] env[62507]: DEBUG nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Start building networks asynchronously for instance.
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1670.279499] env[62507]: DEBUG nova.compute.utils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1670.280655] env[62507]: DEBUG nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1670.280824] env[62507]: DEBUG nova.network.neutron [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1670.289383] env[62507]: DEBUG nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1670.346522] env[62507]: DEBUG nova.policy [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b12f256bd3ed42299824cce572ed252a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2d1f653b60f4006b21f82e2a2d4ecfb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1670.350023] env[62507]: DEBUG nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1670.375463] env[62507]: DEBUG nova.virt.hardware [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=<?>,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-02-12T01:14:38Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1670.375696] env[62507]: DEBUG nova.virt.hardware [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1670.375853] env[62507]: DEBUG nova.virt.hardware [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1670.376052] env[62507]: DEBUG nova.virt.hardware [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1670.376207] env[62507]: DEBUG nova.virt.hardware [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1670.376358] env[62507]: DEBUG nova.virt.hardware [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1670.376569] env[62507]: DEBUG nova.virt.hardware [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1670.376730] env[62507]: DEBUG nova.virt.hardware [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1670.376899] env[62507]: DEBUG nova.virt.hardware [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd
tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1670.377077] env[62507]: DEBUG nova.virt.hardware [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1670.377264] env[62507]: DEBUG nova.virt.hardware [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1670.378133] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e45110-523a-4159-ac86-b80addfc90f8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.386052] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9298d83b-960f-47c3-920b-5dd8f85747a4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.716503] env[62507]: DEBUG nova.network.neutron [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Successfully created port: aa2ffda6-92fd-4caf-8394-7834da1ada95 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1671.561093] env[62507]: DEBUG nova.network.neutron [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Successfully updated port: aa2ffda6-92fd-4caf-8394-7834da1ada95 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1671.579355] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Acquiring lock "refresh_cache-4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.579355] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Acquired lock "refresh_cache-4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.579355] env[62507]: DEBUG nova.network.neutron [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1671.837540] env[62507]: DEBUG nova.network.neutron [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 
4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1671.934419] env[62507]: DEBUG nova.compute.manager [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Received event network-vif-plugged-aa2ffda6-92fd-4caf-8394-7834da1ada95 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1671.934764] env[62507]: DEBUG oslo_concurrency.lockutils [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] Acquiring lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.934849] env[62507]: DEBUG oslo_concurrency.lockutils [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] Lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.935032] env[62507]: DEBUG oslo_concurrency.lockutils [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] Lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.935207] env[62507]: DEBUG nova.compute.manager [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] No waiting events found dispatching network-vif-plugged-aa2ffda6-92fd-4caf-8394-7834da1ada95 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1671.935370] env[62507]: WARNING nova.compute.manager [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Received unexpected event network-vif-plugged-aa2ffda6-92fd-4caf-8394-7834da1ada95 for instance with vm_state building and task_state spawning. [ 1671.935534] env[62507]: DEBUG nova.compute.manager [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Received event network-changed-aa2ffda6-92fd-4caf-8394-7834da1ada95 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1671.935689] env[62507]: DEBUG nova.compute.manager [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Refreshing instance network info cache due to event network-changed-aa2ffda6-92fd-4caf-8394-7834da1ada95. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1671.935858] env[62507]: DEBUG oslo_concurrency.lockutils [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] Acquiring lock "refresh_cache-4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.039300] env[62507]: DEBUG nova.network.neutron [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Updating instance_info_cache with network_info: [{"id": "aa2ffda6-92fd-4caf-8394-7834da1ada95", "address": "fa:16:3e:76:df:6b", "network": {"id": "3e78fb08-1fae-4d8b-9d73-9df8f9a0bc8d", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1629412478-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2d1f653b60f4006b21f82e2a2d4ecfb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa2ffda6-92", "ovs_interfaceid": "aa2ffda6-92fd-4caf-8394-7834da1ada95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1672.053670] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Releasing lock "refresh_cache-4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.053994] env[62507]: DEBUG nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Instance network_info: |[{"id": "aa2ffda6-92fd-4caf-8394-7834da1ada95", "address": "fa:16:3e:76:df:6b", "network": {"id": "3e78fb08-1fae-4d8b-9d73-9df8f9a0bc8d", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1629412478-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2d1f653b60f4006b21f82e2a2d4ecfb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapaa2ffda6-92", "ovs_interfaceid": "aa2ffda6-92fd-4caf-8394-7834da1ada95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1672.054315] env[62507]: DEBUG oslo_concurrency.lockutils [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] Acquired lock "refresh_cache-4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.054521] env[62507]: DEBUG nova.network.neutron [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Refreshing network info cache for port aa2ffda6-92fd-4caf-8394-7834da1ada95 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1672.055598] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:df:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa2ffda6-92fd-4caf-8394-7834da1ada95', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1672.065497] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Creating folder: Project (a2d1f653b60f4006b21f82e2a2d4ecfb). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1672.070707] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f772687-d4d9-47c2-a15c-76a3a0c348d5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.081056] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Created folder: Project (a2d1f653b60f4006b21f82e2a2d4ecfb) in parent group-v497991. [ 1672.081352] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Creating folder: Instances. Parent ref: group-v498090. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1672.081624] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f095bce8-287d-4d8d-9f6c-827d14db927d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.090587] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Created folder: Instances in parent group-v498090. 
[ 1672.091146] env[62507]: DEBUG oslo.service.loopingcall [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1672.091385] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1672.091626] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-798c63a7-1b8c-44f4-b67e-475925e6c94b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.118149] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1672.118149] env[62507]: value = "task-2460089" [ 1672.118149] env[62507]: _type = "Task" [ 1672.118149] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.128796] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460089, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.616929] env[62507]: DEBUG nova.network.neutron [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Updated VIF entry in instance network info cache for port aa2ffda6-92fd-4caf-8394-7834da1ada95. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1672.617319] env[62507]: DEBUG nova.network.neutron [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Updating instance_info_cache with network_info: [{"id": "aa2ffda6-92fd-4caf-8394-7834da1ada95", "address": "fa:16:3e:76:df:6b", "network": {"id": "3e78fb08-1fae-4d8b-9d73-9df8f9a0bc8d", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1629412478-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2d1f653b60f4006b21f82e2a2d4ecfb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa2ffda6-92", "ovs_interfaceid": "aa2ffda6-92fd-4caf-8394-7834da1ada95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1672.627579] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460089, 'name': CreateVM_Task, 'duration_secs': 0.291007} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.628365] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1672.629384] env[62507]: DEBUG oslo_concurrency.lockutils [req-b19e0eec-7ec9-451b-9bf3-2141d811572d req-ebfdff26-da16-454f-84ce-019e12dfb712 service nova] Releasing lock "refresh_cache-4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.630101] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.630263] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.630600] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1672.631087] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49adaf08-3369-4241-aff3-1957594e24f1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.635807] env[62507]: DEBUG oslo_vmware.api [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Waiting for the task: (returnval){ [ 1672.635807] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52ed1692-7d0c-1b24-83ea-54031c6b99fb" [ 1672.635807] env[62507]: _type = "Task" [ 1672.635807] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.644557] env[62507]: DEBUG oslo_vmware.api [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52ed1692-7d0c-1b24-83ea-54031c6b99fb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.148446] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.149147] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1673.149147] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.103035] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "39471434-14af-468a-8b55-5fc58957e7b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.103309] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "39471434-14af-468a-8b55-5fc58957e7b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.127175] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "16295fdd-45d6-492f-99d9-1006ec42c097" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.127395] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "16295fdd-45d6-492f-99d9-1006ec42c097" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.942581] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "a76d0987-29c2-423b-972c-990639986d5f" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.942913] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "a76d0987-29c2-423b-972c-990639986d5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.549817] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1713.549817] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1713.549817] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1713.549817] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1713.571907] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1713.572080] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1713.572219] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1713.572349] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1713.572479] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1713.572604] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1713.572729] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1713.572849] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1713.572999] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1713.573144] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1713.573269] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1715.167614] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1715.188854] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1715.190988] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1715.190988] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1716.168048] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1716.168048] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1717.167765] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1718.045288] env[62507]: WARNING oslo_vmware.rw_handles [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1718.045288] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1718.045288] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1718.045288] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1718.045288] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1718.045288] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1718.045288] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1718.045288] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1718.045288] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1718.045288] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1718.045288] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote 
end closed connection without response [ 1718.045288] env[62507]: ERROR oslo_vmware.rw_handles [ 1718.045930] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/dfe487b4-996d-4412-aa3c-6246c72d113b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1718.047781] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1718.048037] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Copying Virtual Disk [datastore2] vmware_temp/dfe487b4-996d-4412-aa3c-6246c72d113b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/dfe487b4-996d-4412-aa3c-6246c72d113b/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1718.048337] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c514415-86ce-45ca-86e3-4105941c2ace {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.056200] env[62507]: DEBUG oslo_vmware.api [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Waiting for the task: (returnval){ [ 1718.056200] env[62507]: value = "task-2460090" [ 1718.056200] env[62507]: _type = "Task" [ 1718.056200] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.063929] env[62507]: DEBUG oslo_vmware.api [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Task: {'id': task-2460090, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.167707] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1718.566233] env[62507]: DEBUG oslo_vmware.exceptions [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1718.566530] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.567092] env[62507]: ERROR nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1718.567092] env[62507]: Faults: ['InvalidArgument'] [ 1718.567092] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Traceback (most recent call last): [ 1718.567092] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1718.567092] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] yield resources [ 1718.567092] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1718.567092] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] self.driver.spawn(context, instance, image_meta, [ 1718.567092] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1718.567092] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1718.567092] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1718.567092] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] self._fetch_image_if_missing(context, vi) [ 1718.567092] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1718.567523] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] image_cache(vi, tmp_image_ds_loc) [ 1718.567523] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1718.567523] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] vm_util.copy_virtual_disk( [ 1718.567523] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1718.567523] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] session._wait_for_task(vmdk_copy_task) [ 1718.567523] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1718.567523] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] return self.wait_for_task(task_ref) [ 1718.567523] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1718.567523] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] return evt.wait() [ 1718.567523] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1718.567523] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] result = hub.switch() [ 1718.567523] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1718.567523] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] return self.greenlet.switch() [ 1718.567992] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1718.567992] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] self.f(*self.args, **self.kw) [ 1718.567992] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1718.567992] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] raise exceptions.translate_fault(task_info.error) [ 1718.567992] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1718.567992] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Faults: ['InvalidArgument'] [ 1718.567992] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] [ 1718.567992] env[62507]: INFO nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Terminating instance [ 1718.569028] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.569240] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1718.569475] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51607db4-1edb-43af-9436-492169abf4bc 
{{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.571890] env[62507]: DEBUG nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1718.572110] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1718.572832] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3ea389-fbf3-4da9-baf3-e33c4eb0a26e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.579731] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1718.579974] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f388ef1-f1d7-4526-b81e-051002be704a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.582205] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1718.582380] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1718.583393] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-679ab6b8-7bd8-40dc-b2ee-1eca5c851c6f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.589373] env[62507]: DEBUG oslo_vmware.api [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Waiting for the task: (returnval){ [ 1718.589373] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5252d43c-3b28-1d1f-a900-bb086fc5a42c" [ 1718.589373] env[62507]: _type = "Task" [ 1718.589373] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.598012] env[62507]: DEBUG oslo_vmware.api [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5252d43c-3b28-1d1f-a900-bb086fc5a42c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.648791] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1718.649026] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1718.649216] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Deleting the datastore file [datastore2] f4f51028-a313-4d17-bcf1-4decec2d3c3d {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1718.649476] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01de2300-7856-43b1-ac53-3c645012742e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.655233] env[62507]: DEBUG oslo_vmware.api [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Waiting for the task: (returnval){ [ 1718.655233] env[62507]: value = "task-2460092" [ 1718.655233] env[62507]: _type = "Task" [ 1718.655233] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.662685] env[62507]: DEBUG oslo_vmware.api [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Task: {'id': task-2460092, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.102092] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1719.102092] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Creating directory with path [datastore2] vmware_temp/c5ba2894-4ea5-4063-af65-c01f3b5945a2/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1719.102092] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1615380-b52f-41ce-9a31-bc7c1389ad3e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.113078] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Created directory with path [datastore2] vmware_temp/c5ba2894-4ea5-4063-af65-c01f3b5945a2/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1719.113321] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Fetch image to [datastore2] vmware_temp/c5ba2894-4ea5-4063-af65-c01f3b5945a2/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1719.113536] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/c5ba2894-4ea5-4063-af65-c01f3b5945a2/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1719.114268] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90bb980e-61d1-49af-89e1-58678f841c08 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.120650] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2257d5e-70dd-4088-8ca7-e8a5b687ad09 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.129531] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62010e4-822a-4604-a6b4-1476cd34b39b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.163218] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-be15ea01-634f-488f-8ea0-1b8c5e2d517e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.167393] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1719.172684] env[62507]: DEBUG oslo_vmware.api [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Task: {'id': task-2460092, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08564} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.173165] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b84df664-39e6-45b1-8424-f6938b7fb055 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.174885] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1719.175097] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1719.175280] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1719.175454] env[62507]: INFO nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1719.177438] env[62507]: DEBUG nova.compute.claims [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1719.177612] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.177827] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.181438] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.196542] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1719.251452] env[62507]: DEBUG oslo_vmware.rw_handles [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c5ba2894-4ea5-4063-af65-c01f3b5945a2/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1719.309200] env[62507]: DEBUG nova.scheduler.client.report [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Refreshing inventories for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1719.313513] env[62507]: DEBUG oslo_vmware.rw_handles [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Completed reading data from the image iterator. 
{{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1719.313764] env[62507]: DEBUG oslo_vmware.rw_handles [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c5ba2894-4ea5-4063-af65-c01f3b5945a2/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1719.333463] env[62507]: DEBUG nova.scheduler.client.report [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Updating ProviderTree inventory for provider 40e67440-0925-46e5-9b58-6e63187cdfab from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1719.333746] env[62507]: DEBUG nova.compute.provider_tree [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Updating inventory in ProviderTree for provider 40e67440-0925-46e5-9b58-6e63187cdfab with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1719.345168] env[62507]: DEBUG nova.scheduler.client.report [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Refreshing aggregate associations for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab, aggregates: None {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1719.363124] env[62507]: DEBUG nova.scheduler.client.report [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Refreshing trait associations for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1719.527165] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2046ff75-cb2a-4ab6-94e8-5c4c04dbf4a8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.534866] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9094a32-397a-4603-9ee9-018bda48b091 {{(pid=62507) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.564591] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff206119-0e20-4cfd-ac2c-bc84a3e0890f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.572946] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ffef5f-561a-401e-8528-21f5ea61e830 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.585431] env[62507]: DEBUG nova.compute.provider_tree [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1719.594702] env[62507]: DEBUG nova.scheduler.client.report [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1719.609271] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.431s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.609816] env[62507]: ERROR nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1719.609816] env[62507]: Faults: ['InvalidArgument'] [ 1719.609816] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Traceback (most recent call last): [ 1719.609816] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1719.609816] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] self.driver.spawn(context, instance, image_meta, [ 1719.609816] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1719.609816] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1719.609816] env[62507]: ERROR nova.compute.manager 
[instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1719.609816] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] self._fetch_image_if_missing(context, vi) [ 1719.609816] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1719.609816] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] image_cache(vi, tmp_image_ds_loc) [ 1719.609816] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1719.610209] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] vm_util.copy_virtual_disk( [ 1719.610209] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1719.610209] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] session._wait_for_task(vmdk_copy_task) [ 1719.610209] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1719.610209] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] return self.wait_for_task(task_ref) [ 1719.610209] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1719.610209] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] return evt.wait() [ 1719.610209] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1719.610209] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] result = hub.switch() [ 1719.610209] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1719.610209] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] return self.greenlet.switch() [ 1719.610209] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1719.610209] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] self.f(*self.args, **self.kw) [ 1719.610660] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1719.610660] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] raise exceptions.translate_fault(task_info.error) [ 1719.610660] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1719.610660] env[62507]: ERROR nova.compute.manager [instance: 
f4f51028-a313-4d17-bcf1-4decec2d3c3d] Faults: ['InvalidArgument'] [ 1719.610660] env[62507]: ERROR nova.compute.manager [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] [ 1719.610660] env[62507]: DEBUG nova.compute.utils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1719.611699] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.430s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.611927] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.612102] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1719.612749] env[62507]: DEBUG nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Build of instance f4f51028-a313-4d17-bcf1-4decec2d3c3d was re-scheduled: A specified parameter was not correct: fileType [ 1719.612749] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1719.613145] env[62507]: DEBUG nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1719.613320] env[62507]: DEBUG nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1719.613493] env[62507]: DEBUG nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1719.613665] env[62507]: DEBUG nova.network.neutron [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1719.615762] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347fc08a-40c1-4142-ada4-35e6722c8e39 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.624233] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764bb7ff-b951-4200-b1e5-af1a41e3342e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.637870] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472ab641-0d36-4b7c-83de-92c885cf9f9e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.644049] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b2e5f5-0552-4adc-9b32-93cdecd45278 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.673567] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181124MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1719.673679] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.673877] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.747259] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f4f51028-a313-4d17-bcf1-4decec2d3c3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1719.747422] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 65efc608-6573-4690-8d11-2f0459647d70 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.747551] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.747676] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01043570-d72d-4a97-8c51-cfe30b25b82b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.747811] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef5633ea-273d-429f-9a02-326711b73bab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.747966] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7ff089f8-f304-4c2e-bf3d-16997fe8968c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.748110] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 637de77e-d142-45ca-8a4e-3bf365e31502 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.748229] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.748345] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f257db53-3c5f-4dfc-bd45-9f2b27b49401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.748459] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.758111] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1c56d4af-ba43-4141-86d6-880ff384041e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1719.769155] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 39471434-14af-468a-8b55-5fc58957e7b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1719.779148] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 16295fdd-45d6-492f-99d9-1006ec42c097 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1719.788704] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a76d0987-29c2-423b-972c-990639986d5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1719.789276] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1719.789276] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1719.945584] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4bed23-f6ec-4952-a0c3-39fec13d5091 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.953412] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d563863-8545-451c-b7d5-1c80250c2402 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.984909] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d01d12-f11d-497e-86e8-55e3e015232e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.994266] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed5476a-d160-412d-b28d-76ab17018337 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.009274] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1720.013027] env[62507]: DEBUG nova.network.neutron [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.019654] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1720.025560] env[62507]: INFO nova.compute.manager [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Took 0.41 seconds to deallocate network for instance. 
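The resource audit in the entries above is simple arithmetic over the per-instance placement allocations: each of the nine actively managed instances holds {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, and used RAM additionally carries the 512 MB 'reserved' from the provider inventory data. A minimal sketch (plain Python, written here for illustration, not taken from Nova's source) that reproduces the "Final resource view" figures logged above:

    # Recompute the "Final resource view" from values visible in the log.
    # Assumptions: nine active allocations of 1 VCPU / 128 MB / 1 GB each,
    # plus the 512 MB MEMORY_MB reservation from the inventory data.
    allocations = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 9
    reserved_ram_mb = 512  # 'reserved' under MEMORY_MB in the inventory

    used_vcpus = sum(a['VCPU'] for a in allocations)
    used_ram_mb = reserved_ram_mb + sum(a['MEMORY_MB'] for a in allocations)
    used_disk_gb = sum(a['DISK_GB'] for a in allocations)

    # Matches the logged view: used_ram=1664MB used_disk=9GB used_vcpus=9
    assert (used_vcpus, used_ram_mb, used_disk_gb) == (9, 1664, 9)
    print(f"used_ram={used_ram_mb}MB used_disk={used_disk_gb}GB "
          f"used_vcpus={used_vcpus}")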
[ 1720.037309] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1720.037498] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.364s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.113213] env[62507]: INFO nova.scheduler.client.report [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Deleted allocations for instance f4f51028-a313-4d17-bcf1-4decec2d3c3d [ 1720.133101] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e13ad9f-715f-466f-ad27-c9634b13be16 tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "f4f51028-a313-4d17-bcf1-4decec2d3c3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.651s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.134500] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c5df889c-ed99-4210-b939-3b4cbd693abe tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "f4f51028-a313-4d17-bcf1-4decec2d3c3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.129s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.134617] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c5df889c-ed99-4210-b939-3b4cbd693abe tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Acquiring lock "f4f51028-a313-4d17-bcf1-4decec2d3c3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.135036] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c5df889c-ed99-4210-b939-3b4cbd693abe tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "f4f51028-a313-4d17-bcf1-4decec2d3c3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.135036] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c5df889c-ed99-4210-b939-3b4cbd693abe tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "f4f51028-a313-4d17-bcf1-4decec2d3c3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.137827] env[62507]: INFO nova.compute.manager [None req-c5df889c-ed99-4210-b939-3b4cbd693abe tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: 
f4f51028-a313-4d17-bcf1-4decec2d3c3d] Terminating instance [ 1720.139875] env[62507]: DEBUG nova.compute.manager [None req-c5df889c-ed99-4210-b939-3b4cbd693abe tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1720.140105] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c5df889c-ed99-4210-b939-3b4cbd693abe tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1720.140808] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f2a8d923-125e-4dcc-8d64-4adbf3983c05 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.150587] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6126d9a4-c08c-413e-b41f-6d229003ebbf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.162017] env[62507]: DEBUG nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1720.184165] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-c5df889c-ed99-4210-b939-3b4cbd693abe tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f4f51028-a313-4d17-bcf1-4decec2d3c3d could not be found. [ 1720.184413] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c5df889c-ed99-4210-b939-3b4cbd693abe tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1720.184620] env[62507]: INFO nova.compute.manager [None req-c5df889c-ed99-4210-b939-3b4cbd693abe tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1720.184904] env[62507]: DEBUG oslo.service.loopingcall [None req-c5df889c-ed99-4210-b939-3b4cbd693abe tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1720.185175] env[62507]: DEBUG nova.compute.manager [-] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1720.185287] env[62507]: DEBUG nova.network.neutron [-] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1720.210667] env[62507]: DEBUG nova.network.neutron [-] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.215513] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.215835] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.217989] env[62507]: INFO nova.compute.claims [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1720.221009] env[62507]: INFO nova.compute.manager [-] [instance: f4f51028-a313-4d17-bcf1-4decec2d3c3d] Took 0.04 seconds to deallocate network for instance. 
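Each "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" triple above is emitted by oslo.concurrency's lockutils wrapper, which serializes claim, abort, and audit work on the shared "compute_resources" lock. A minimal sketch of that pattern, assuming only the public lockutils.synchronized decorator with its default in-process lock (the function body and timing here are illustrative stand-ins, not Nova code):

    import time

    from oslo_concurrency import lockutils  # requires oslo.concurrency

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Runs only while 'compute_resources' is held; the lockutils
        # wrapper logs the acquire/waited/held lines seen in this log.
        time.sleep(0.1)  # stand-in for claim bookkeeping

    instance_claim()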
[ 1720.313171] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c5df889c-ed99-4210-b939-3b4cbd693abe tempest-ListServerFiltersTestJSON-1253904850 tempest-ListServerFiltersTestJSON-1253904850-project-member] Lock "f4f51028-a313-4d17-bcf1-4decec2d3c3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.179s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.427511] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2deec9fd-9d23-4e33-a229-15341ac47f5e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.435050] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5b019d-545f-486e-92e9-144f0bb9491e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.465938] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a65867-f4ef-4ba3-a8b9-499e69de9ca5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.473325] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6cfa55-395b-468a-8dd8-1905ce1396ad {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.486516] env[62507]: DEBUG nova.compute.provider_tree [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1720.502549] env[62507]: DEBUG nova.scheduler.client.report [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1720.519268] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.303s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.519659] env[62507]: DEBUG nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Start building networks asynchronously for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1720.553269] env[62507]: DEBUG nova.compute.utils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1720.558023] env[62507]: DEBUG nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1720.558023] env[62507]: DEBUG nova.network.neutron [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1720.565302] env[62507]: DEBUG nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1720.635667] env[62507]: DEBUG nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1720.662534] env[62507]: DEBUG nova.virt.hardware [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1720.662782] env[62507]: DEBUG nova.virt.hardware [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1720.662943] env[62507]: DEBUG nova.virt.hardware [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1720.663143] env[62507]: DEBUG nova.virt.hardware [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 
tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1720.663292] env[62507]: DEBUG nova.virt.hardware [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1720.663440] env[62507]: DEBUG nova.virt.hardware [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1720.663648] env[62507]: DEBUG nova.virt.hardware [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1720.663809] env[62507]: DEBUG nova.virt.hardware [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1720.663977] env[62507]: DEBUG nova.virt.hardware [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1720.664156] env[62507]: DEBUG nova.virt.hardware [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1720.664333] env[62507]: DEBUG nova.virt.hardware [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1720.665197] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed95c66-d48d-4a7c-9df9-f7141a919139 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.675266] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bababaa8-19f2-4771-9a7e-0bf800818f17 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.703610] env[62507]: DEBUG nova.policy [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8187d3d405c244f995763c4d67515b6a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c850b58d9b554e81b09f26703a6f50f1', 'project_domain_id': 'default', 'roles': 
['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1721.018705] env[62507]: DEBUG nova.network.neutron [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Successfully created port: 6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1721.728207] env[62507]: DEBUG nova.network.neutron [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Successfully updated port: 6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1721.742992] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "refresh_cache-1c56d4af-ba43-4141-86d6-880ff384041e" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.743164] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired lock "refresh_cache-1c56d4af-ba43-4141-86d6-880ff384041e" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.743316] env[62507]: DEBUG nova.network.neutron [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1721.796730] env[62507]: DEBUG nova.network.neutron [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1721.957418] env[62507]: DEBUG nova.network.neutron [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Updating instance_info_cache with network_info: [{"id": "6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367", "address": "fa:16:3e:3b:35:47", "network": {"id": "a17cad1d-200f-41fe-b1b3-5a098d4c4317", "bridge": "br-int", "label": "tempest-ServersTestJSON-1356127193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c850b58d9b554e81b09f26703a6f50f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d0ea2b6-93", "ovs_interfaceid": "6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1721.968283] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Releasing lock "refresh_cache-1c56d4af-ba43-4141-86d6-880ff384041e" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.968543] env[62507]: DEBUG nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Instance network_info: |[{"id": "6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367", "address": "fa:16:3e:3b:35:47", "network": {"id": "a17cad1d-200f-41fe-b1b3-5a098d4c4317", "bridge": "br-int", "label": "tempest-ServersTestJSON-1356127193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c850b58d9b554e81b09f26703a6f50f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d0ea2b6-93", "ovs_interfaceid": "6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1721.968943] env[62507]: DEBUG 
nova.virt.vmwareapi.vmops [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:35:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '489b2441-7132-4942-8b61-49cf0ad4400e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1721.976547] env[62507]: DEBUG oslo.service.loopingcall [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1721.976991] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1721.977224] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb776d8c-ebcc-470e-99f3-01b6802ae18b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.997175] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1721.997175] env[62507]: value = "task-2460093" [ 1721.997175] env[62507]: _type = "Task" [ 1721.997175] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.004893] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460093, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.051112] env[62507]: DEBUG nova.compute.manager [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Received event network-vif-plugged-6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1722.051380] env[62507]: DEBUG oslo_concurrency.lockutils [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] Acquiring lock "1c56d4af-ba43-4141-86d6-880ff384041e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.051754] env[62507]: DEBUG oslo_concurrency.lockutils [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] Lock "1c56d4af-ba43-4141-86d6-880ff384041e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.051754] env[62507]: DEBUG oslo_concurrency.lockutils [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] Lock "1c56d4af-ba43-4141-86d6-880ff384041e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.051930] env[62507]: DEBUG nova.compute.manager [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] No waiting events found dispatching network-vif-plugged-6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1722.052218] env[62507]: WARNING nova.compute.manager [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Received unexpected event network-vif-plugged-6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367 for instance with vm_state building and task_state spawning. [ 1722.052313] env[62507]: DEBUG nova.compute.manager [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Received event network-changed-6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1722.052425] env[62507]: DEBUG nova.compute.manager [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Refreshing instance network info cache due to event network-changed-6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1722.052604] env[62507]: DEBUG oslo_concurrency.lockutils [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] Acquiring lock "refresh_cache-1c56d4af-ba43-4141-86d6-880ff384041e" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.052793] env[62507]: DEBUG oslo_concurrency.lockutils [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] Acquired lock "refresh_cache-1c56d4af-ba43-4141-86d6-880ff384041e" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.053050] env[62507]: DEBUG nova.network.neutron [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Refreshing network info cache for port 6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1722.352489] env[62507]: DEBUG nova.network.neutron [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Updated VIF entry in instance network info cache for port 6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1722.353426] env[62507]: DEBUG nova.network.neutron [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Updating instance_info_cache with network_info: [{"id": "6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367", "address": "fa:16:3e:3b:35:47", "network": {"id": "a17cad1d-200f-41fe-b1b3-5a098d4c4317", "bridge": "br-int", "label": "tempest-ServersTestJSON-1356127193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c850b58d9b554e81b09f26703a6f50f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d0ea2b6-93", "ovs_interfaceid": "6d0ea2b6-9378-4b8b-9043-6d0cf1a3a367", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.361943] env[62507]: DEBUG oslo_concurrency.lockutils [req-8fea159d-1d65-48c5-9ed5-bbbe25baebb3 req-7b51b510-c25d-4cf5-95ec-912e628cd931 service nova] Releasing lock "refresh_cache-1c56d4af-ba43-4141-86d6-880ff384041e" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.507027] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460093, 'name': CreateVM_Task, 'duration_secs': 0.298597} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.507168] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1722.507921] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.508102] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.508412] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1722.508653] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4c16cff-210e-4356-bf97-06d8fe4b3982 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.513048] env[62507]: DEBUG oslo_vmware.api [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for the task: (returnval){ [ 1722.513048] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52de0d43-1f01-3889-d7e8-0e03be783504" [ 1722.513048] env[62507]: _type = "Task" [ 1722.513048] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.520176] env[62507]: DEBUG oslo_vmware.api [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52de0d43-1f01-3889-d7e8-0e03be783504, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.023519] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.023855] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1723.023963] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1728.171657] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1728.172051] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Cleaning up deleted instances with incomplete migration {{(pid=62507) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1729.167611] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1730.175655] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1730.175970] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Cleaning up deleted instances {{(pid=62507) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1730.186352] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] There are 0 instances to clean {{(pid=62507) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1736.322051] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1736.341875] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Getting list of instances from cluster (obj){ [ 1736.341875] env[62507]: value = "domain-c8" [ 1736.341875] env[62507]: _type = "ClusterComputeResource" [ 1736.341875] env[62507]: } {{(pid=62507) list_instances 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1736.343123] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494e36e9-9c84-41ef-8412-93a3f101956f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.359159] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Got total of 10 instances {{(pid=62507) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1736.359327] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 65efc608-6573-4690-8d11-2f0459647d70 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1736.359514] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1736.359676] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 01043570-d72d-4a97-8c51-cfe30b25b82b {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1736.359831] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid ef5633ea-273d-429f-9a02-326711b73bab {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1736.359985] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 7ff089f8-f304-4c2e-bf3d-16997fe8968c {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1736.360152] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 637de77e-d142-45ca-8a4e-3bf365e31502 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1736.360302] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid fb7f3a79-bd28-48b9-9a64-db1750b0f716 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1736.360448] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid f257db53-3c5f-4dfc-bd45-9f2b27b49401 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1736.360597] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1736.360809] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 1c56d4af-ba43-4141-86d6-880ff384041e {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1736.361152] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "65efc608-6573-4690-8d11-2f0459647d70" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.361415] env[62507]: DEBUG oslo_concurrency.lockutils [None 
req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.361618] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "01043570-d72d-4a97-8c51-cfe30b25b82b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.361860] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "ef5633ea-273d-429f-9a02-326711b73bab" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.362094] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.362298] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "637de77e-d142-45ca-8a4e-3bf365e31502" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.362493] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.362684] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.362871] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.363075] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "1c56d4af-ba43-4141-86d6-880ff384041e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.935085] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 
tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "56c176ec-c6e5-4f48-a5be-badef25c5667" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.935412] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "56c176ec-c6e5-4f48-a5be-badef25c5667" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.237258] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e95a454-32d3-44f4-a7be-9cfef5324496 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "c5d66c7b-54e4-4a5e-8207-0cadce10c4df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.237569] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e95a454-32d3-44f4-a7be-9cfef5324496 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "c5d66c7b-54e4-4a5e-8207-0cadce10c4df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.073286] env[62507]: WARNING oslo_vmware.rw_handles [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1766.073286] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1766.073286] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1766.073286] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1766.073286] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1766.073286] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1766.073286] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1766.073286] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1766.073286] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1766.073286] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1766.073286] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1766.073286] env[62507]: ERROR oslo_vmware.rw_handles [ 1766.073904] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Downloaded image file data 
601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/c5ba2894-4ea5-4063-af65-c01f3b5945a2/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1766.076576] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1766.076823] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Copying Virtual Disk [datastore2] vmware_temp/c5ba2894-4ea5-4063-af65-c01f3b5945a2/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/c5ba2894-4ea5-4063-af65-c01f3b5945a2/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1766.077131] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-736eb7e7-3dbb-42b6-afcc-7e0468d64422 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.085319] env[62507]: DEBUG oslo_vmware.api [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Waiting for the task: (returnval){ [ 1766.085319] env[62507]: value = "task-2460094" [ 1766.085319] env[62507]: _type = "Task" [ 1766.085319] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.093504] env[62507]: DEBUG oslo_vmware.api [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Task: {'id': task-2460094, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.595171] env[62507]: DEBUG oslo_vmware.exceptions [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1766.595484] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.596074] env[62507]: ERROR nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1766.596074] env[62507]: Faults: ['InvalidArgument'] [ 1766.596074] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] Traceback (most recent call last): [ 1766.596074] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1766.596074] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] yield resources [ 1766.596074] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1766.596074] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] self.driver.spawn(context, instance, image_meta, [ 1766.596074] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1766.596074] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1766.596074] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1766.596074] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] self._fetch_image_if_missing(context, vi) [ 1766.596074] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1766.596451] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] image_cache(vi, tmp_image_ds_loc) [ 1766.596451] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1766.596451] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] vm_util.copy_virtual_disk( [ 1766.596451] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1766.596451] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] session._wait_for_task(vmdk_copy_task) [ 1766.596451] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1766.596451] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] return self.wait_for_task(task_ref) [ 1766.596451] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1766.596451] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] return evt.wait() [ 1766.596451] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1766.596451] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] result = hub.switch() [ 1766.596451] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1766.596451] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] return self.greenlet.switch() [ 1766.596829] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1766.596829] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] self.f(*self.args, **self.kw) [ 1766.596829] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1766.596829] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] raise exceptions.translate_fault(task_info.error) [ 1766.596829] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1766.596829] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] Faults: ['InvalidArgument'] [ 1766.596829] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] [ 1766.596829] env[62507]: INFO nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Terminating instance [ 1766.598039] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.598254] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1766.598496] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-981ae887-62c5-4443-ae09-5c70c0cc09d0 {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.602021] env[62507]: DEBUG nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1766.602227] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1766.602955] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085a060a-dc47-4aad-a45f-3a993058921f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.609817] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1766.610804] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f44c5fa-431a-4aac-ba7d-3a3ee45dae50 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.612174] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1766.612351] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1766.612997] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20b0ff41-0cdb-4ad0-8913-4a1376b37a44 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.619076] env[62507]: DEBUG oslo_vmware.api [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for the task: (returnval){ [ 1766.619076] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52054131-4c3b-d237-c877-a475049b95c4" [ 1766.619076] env[62507]: _type = "Task" [ 1766.619076] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.631155] env[62507]: DEBUG oslo_vmware.api [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52054131-4c3b-d237-c877-a475049b95c4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.677537] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1766.677785] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1766.677935] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Deleting the datastore file [datastore2] 65efc608-6573-4690-8d11-2f0459647d70 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1766.678221] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e142ce27-d990-40e5-93b4-8e787fc27bb5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.684342] env[62507]: DEBUG oslo_vmware.api [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Waiting for the task: (returnval){ [ 1766.684342] env[62507]: value = "task-2460096" [ 1766.684342] env[62507]: _type = "Task" [ 1766.684342] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.691575] env[62507]: DEBUG oslo_vmware.api [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Task: {'id': task-2460096, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.130106] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1767.130106] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Creating directory with path [datastore2] vmware_temp/770b9ca6-7453-4f6a-a541-3edc29a572d9/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1767.130106] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f544938-f62b-4fd6-a162-8fbbf1d63d60 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.141857] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Created directory with path [datastore2] vmware_temp/770b9ca6-7453-4f6a-a541-3edc29a572d9/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1767.142079] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Fetch image to [datastore2] vmware_temp/770b9ca6-7453-4f6a-a541-3edc29a572d9/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1767.142263] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/770b9ca6-7453-4f6a-a541-3edc29a572d9/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1767.143029] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0f1fb4-2cd9-4703-8434-42204c9cdfd2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.149884] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb10557-c06d-4488-80c3-cb79fb8b068a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.159127] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ccc420-77c9-4abd-9e2a-5c2c02d0eb9d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.191313] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31d18c4-3652-4963-ab50-7b2feaa28e6b {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.197997] env[62507]: DEBUG oslo_vmware.api [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Task: {'id': task-2460096, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080936} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.199354] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1767.199551] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1767.199729] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1767.199921] env[62507]: INFO nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Took 0.60 seconds to destroy the instance on the hypervisor. 
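The Task entries above ("Waiting for the task ... to complete", "progress is 0%.", "completed successfully") follow oslo.vmware's poll-until-terminal-state pattern, with a fault translated into an exception on error (the VimFaultException with Faults: ['InvalidArgument'] seen earlier). A minimal sketch of that loop, assuming a hypothetical fetch_task_info callable and a TaskFault class that stand in for oslo.vmware's real names:

    import time

    class TaskFault(Exception):
        # Illustrative stand-in for oslo_vmware.exceptions.VimFaultException.
        def __init__(self, msg, fault_list):
            super().__init__(msg)
            self.fault_list = fault_list  # e.g. ['InvalidArgument']

    def wait_for_task(fetch_task_info, poll_interval=0.5):
        # Poll a vCenter-style task until it reaches a terminal state.
        # fetch_task_info is a hypothetical callable returning a dict like
        # {'state': 'running'|'success'|'error', 'progress': int,
        #  'error': {'msg': str, 'faults': [str]}}.
        while True:
            info = fetch_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # Mirrors _poll_task raising translate_fault(task_info.error).
                raise TaskFault(info['error']['msg'], info['error']['faults'])
            time.sleep(poll_interval)  # logged above as "... progress is 0%."

In the log this loop runs inside a looping call on an eventlet hub, which is why the traceback passes through loopingcall.py and hub.switch() before _poll_task raises.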
[ 1767.201711] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9521284e-6832-4a48-858e-c8ca1169ad59 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.203608] env[62507]: DEBUG nova.compute.claims [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1767.203781] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.203996] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.227064] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1767.277990] env[62507]: DEBUG oslo_vmware.rw_handles [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/770b9ca6-7453-4f6a-a541-3edc29a572d9/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1767.338502] env[62507]: DEBUG oslo_vmware.rw_handles [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1767.338701] env[62507]: DEBUG oslo_vmware.rw_handles [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/770b9ca6-7453-4f6a-a541-3edc29a572d9/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1767.474082] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689e9df4-8507-435e-bcf4-751233a84fc5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.481321] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1aee18f-5ef1-457f-8553-6a0378d8a828 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.510870] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ea0ee5-bfdd-4313-ab7a-20cf625577c9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.517902] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ab1b3d-eeae-46c4-bd11-03589099f17e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.531041] env[62507]: DEBUG nova.compute.provider_tree [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1767.541034] env[62507]: DEBUG nova.scheduler.client.report [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1767.557451] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.353s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.558034] env[62507]: ERROR nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1767.558034] env[62507]: Faults: ['InvalidArgument'] [ 1767.558034] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] Traceback (most recent call last): [ 1767.558034] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1767.558034] env[62507]: 
ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] self.driver.spawn(context, instance, image_meta, [ 1767.558034] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1767.558034] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1767.558034] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1767.558034] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] self._fetch_image_if_missing(context, vi) [ 1767.558034] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1767.558034] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] image_cache(vi, tmp_image_ds_loc) [ 1767.558034] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1767.558431] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] vm_util.copy_virtual_disk( [ 1767.558431] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1767.558431] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] session._wait_for_task(vmdk_copy_task) [ 1767.558431] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1767.558431] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] return self.wait_for_task(task_ref) [ 1767.558431] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1767.558431] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] return evt.wait() [ 1767.558431] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1767.558431] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] result = hub.switch() [ 1767.558431] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1767.558431] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] return self.greenlet.switch() [ 1767.558431] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1767.558431] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] self.f(*self.args, **self.kw) [ 1767.558845] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1767.558845] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] raise exceptions.translate_fault(task_info.error) [ 1767.558845] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1767.558845] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] Faults: ['InvalidArgument'] [ 1767.558845] env[62507]: ERROR nova.compute.manager [instance: 65efc608-6573-4690-8d11-2f0459647d70] [ 1767.558845] env[62507]: DEBUG nova.compute.utils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1767.560185] env[62507]: DEBUG nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Build of instance 65efc608-6573-4690-8d11-2f0459647d70 was re-scheduled: A specified parameter was not correct: fileType [ 1767.560185] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1767.560583] env[62507]: DEBUG nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1767.560759] env[62507]: DEBUG nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1767.560956] env[62507]: DEBUG nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1767.561146] env[62507]: DEBUG nova.network.neutron [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1767.935954] env[62507]: DEBUG nova.network.neutron [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1767.946141] env[62507]: INFO nova.compute.manager [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Took 0.38 seconds to deallocate network for instance. [ 1768.038209] env[62507]: INFO nova.scheduler.client.report [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Deleted allocations for instance 65efc608-6573-4690-8d11-2f0459647d70 [ 1768.059495] env[62507]: DEBUG oslo_concurrency.lockutils [None req-ee57a7f4-5f39-4af2-8abc-d516094efc91 tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Lock "65efc608-6573-4690-8d11-2f0459647d70" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 630.066s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.060764] env[62507]: DEBUG oslo_concurrency.lockutils [None req-deaec11c-8a9c-46ce-a158-e710f62bb53e tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Lock "65efc608-6573-4690-8d11-2f0459647d70" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 434.192s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.060888] env[62507]: DEBUG oslo_concurrency.lockutils [None req-deaec11c-8a9c-46ce-a158-e710f62bb53e tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Acquiring lock "65efc608-6573-4690-8d11-2f0459647d70-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.061187] env[62507]: DEBUG oslo_concurrency.lockutils [None req-deaec11c-8a9c-46ce-a158-e710f62bb53e tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Lock "65efc608-6573-4690-8d11-2f0459647d70-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.061304] env[62507]: DEBUG oslo_concurrency.lockutils [None req-deaec11c-8a9c-46ce-a158-e710f62bb53e tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Lock "65efc608-6573-4690-8d11-2f0459647d70-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.063372] env[62507]: INFO nova.compute.manager [None req-deaec11c-8a9c-46ce-a158-e710f62bb53e tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Terminating instance [ 1768.065080] env[62507]: DEBUG nova.compute.manager [None req-deaec11c-8a9c-46ce-a158-e710f62bb53e tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1768.065280] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-deaec11c-8a9c-46ce-a158-e710f62bb53e tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1768.065807] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d59f0260-222a-4ffb-a31a-b7c7e2f45e31 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.075206] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c285cad4-a336-4271-bcc8-089f2d3adace {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.086676] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1768.107281] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-deaec11c-8a9c-46ce-a158-e710f62bb53e tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 65efc608-6573-4690-8d11-2f0459647d70 could not be found. [ 1768.107376] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-deaec11c-8a9c-46ce-a158-e710f62bb53e tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1768.107484] env[62507]: INFO nova.compute.manager [None req-deaec11c-8a9c-46ce-a158-e710f62bb53e tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Took 0.04 seconds to destroy the instance on the hypervisor. 
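The Acquiring/acquired/"released" lines throughout this section come from oslo.concurrency's lockutils, used in two forms: the lock() context manager keyed by instance UUID (e.g. "65efc608-...-events"), and the synchronized() decorator for coarser locks such as "compute_resources". A small sketch of both forms; the terminate/destroy and claims names are illustrative, not Nova's actual methods:

    from oslo_concurrency import lockutils

    # Context-manager form: serialize all operations on one instance,
    # keyed by its UUID, as the manager does above.
    def terminate(instance_uuid, destroy):
        # destroy is a hypothetical callable standing in for the driver call.
        with lockutils.lock(instance_uuid):
            destroy(instance_uuid)
        # Leaving the with-block is what the log reports as "released".

    # Decorator form, used for shared state such as the resource tracker's
    # "compute_resources" lock.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(claims, instance_uuid):
        claims.pop(instance_uuid, None)

The "waited N.NNNs" figures in the log are the time a caller blocked before the semaphore was granted, which is why an uncontended acquire reports 0.000s.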
[ 1768.107723] env[62507]: DEBUG oslo.service.loopingcall [None req-deaec11c-8a9c-46ce-a158-e710f62bb53e tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1768.107954] env[62507]: DEBUG nova.compute.manager [-] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1768.108064] env[62507]: DEBUG nova.network.neutron [-] [instance: 65efc608-6573-4690-8d11-2f0459647d70] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1768.131503] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.131762] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.133210] env[62507]: INFO nova.compute.claims [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1768.136357] env[62507]: DEBUG nova.network.neutron [-] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.145863] env[62507]: INFO nova.compute.manager [-] [instance: 65efc608-6573-4690-8d11-2f0459647d70] Took 0.04 seconds to deallocate network for instance. [ 1768.243963] env[62507]: DEBUG oslo_concurrency.lockutils [None req-deaec11c-8a9c-46ce-a158-e710f62bb53e tempest-ServersTestFqdnHostnames-69883058 tempest-ServersTestFqdnHostnames-69883058-project-member] Lock "65efc608-6573-4690-8d11-2f0459647d70" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.183s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.244662] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "65efc608-6573-4690-8d11-2f0459647d70" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 31.884s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.244845] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 65efc608-6573-4690-8d11-2f0459647d70] During sync_power_state the instance has a pending task (deleting). Skip. 
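The "Waiting for function ..._deallocate_network_with_retries to return" entry reflects oslo.service's loopingcall-based retry machinery wrapping the Neutron deallocation. A hedged sketch using loopingcall.RetryDecorator, assuming an illustrative DeallocateFailed exception and a network_api stand-in (not Nova's exact retry configuration):

    from oslo_service import loopingcall

    class DeallocateFailed(Exception):
        # Illustrative failure type; the real code retries on specific
        # exceptions raised by the network API.
        pass

    # Retry the wrapped call with increasing sleeps between attempts.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=10,
                                exceptions=(DeallocateFailed,))
    def deallocate_network_with_retries(network_api, context, instance_uuid):
        # network_api is a hypothetical stand-in for Nova's Neutron client.
        if not network_api.deallocate_for_instance(context, instance_uuid):
            raise DeallocateFailed(instance_uuid)

Because the decorated function only returns once the retries settle, the caller logs a single "Waiting for function ... to return" line rather than one line per attempt.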
[ 1768.245024] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "65efc608-6573-4690-8d11-2f0459647d70" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.366059] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0827277-41ab-4b9b-9c34-d059192511b3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.377067] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfb6ef2-31ac-4fc4-85d2-363ee21f97ea {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.411226] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7a503915-3522-43c5-a32d-28eb56376c13 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Acquiring lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.412023] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-707a65e8-093e-4958-a103-662519415d1d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.419927] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8609e725-e2bf-4616-af84-8e8e769c80e5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.434043] env[62507]: DEBUG nova.compute.provider_tree [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1768.443398] env[62507]: DEBUG nova.scheduler.client.report [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1768.456385] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.324s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.456702] env[62507]: DEBUG 
nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1768.507307] env[62507]: DEBUG nova.compute.utils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1768.508647] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1768.508790] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1768.518419] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1768.581082] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1768.584583] env[62507]: DEBUG nova.policy [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec30c348fdca4b22a99ab020dfb776f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b70942ca8c3d422a8d7740aad1324a6b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1768.606682] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1768.606918] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1768.607106] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1768.607322] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1768.607475] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1768.607627] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1768.607841] env[62507]: DEBUG 
nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1768.608036] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1768.608232] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1768.608635] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1768.608635] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1768.609439] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20650b3a-b7a3-4f41-8c55-c10a51532e38 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.617536] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df23d68-80da-4739-b1c6-c13ab4498c58 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.945094] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Successfully created port: 54ba8737-1a74-4d7e-b23b-2c8d2dca3c58 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1769.386793] env[62507]: DEBUG oslo_concurrency.lockutils [None req-124c17e1-58a3-4c3a-aca1-dadf761c08f2 tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Acquiring lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.823356] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Successfully updated port: 54ba8737-1a74-4d7e-b23b-2c8d2dca3c58 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1769.844590] env[62507]: DEBUG 
oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "refresh_cache-39471434-14af-468a-8b55-5fc58957e7b6" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.844590] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired lock "refresh_cache-39471434-14af-468a-8b55-5fc58957e7b6" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.844590] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1769.890858] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1769.978992] env[62507]: DEBUG nova.compute.manager [req-28faad80-7a18-4910-bcfd-22ce2321d2ba req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Received event network-vif-plugged-54ba8737-1a74-4d7e-b23b-2c8d2dca3c58 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1769.979243] env[62507]: DEBUG oslo_concurrency.lockutils [req-28faad80-7a18-4910-bcfd-22ce2321d2ba req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] Acquiring lock "39471434-14af-468a-8b55-5fc58957e7b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.979420] env[62507]: DEBUG oslo_concurrency.lockutils [req-28faad80-7a18-4910-bcfd-22ce2321d2ba req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] Lock "39471434-14af-468a-8b55-5fc58957e7b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.979668] env[62507]: DEBUG oslo_concurrency.lockutils [req-28faad80-7a18-4910-bcfd-22ce2321d2ba req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] Lock "39471434-14af-468a-8b55-5fc58957e7b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.979791] env[62507]: DEBUG nova.compute.manager [req-28faad80-7a18-4910-bcfd-22ce2321d2ba req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] No waiting events found dispatching network-vif-plugged-54ba8737-1a74-4d7e-b23b-2c8d2dca3c58 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1769.979902] env[62507]: WARNING nova.compute.manager [req-28faad80-7a18-4910-bcfd-22ce2321d2ba 
req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Received unexpected event network-vif-plugged-54ba8737-1a74-4d7e-b23b-2c8d2dca3c58 for instance with vm_state building and task_state spawning. [ 1769.980066] env[62507]: DEBUG nova.compute.manager [req-28faad80-7a18-4910-bcfd-22ce2321d2ba req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Received event network-changed-54ba8737-1a74-4d7e-b23b-2c8d2dca3c58 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1769.980628] env[62507]: DEBUG nova.compute.manager [req-28faad80-7a18-4910-bcfd-22ce2321d2ba req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Refreshing instance network info cache due to event network-changed-54ba8737-1a74-4d7e-b23b-2c8d2dca3c58. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1769.980628] env[62507]: DEBUG oslo_concurrency.lockutils [req-28faad80-7a18-4910-bcfd-22ce2321d2ba req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] Acquiring lock "refresh_cache-39471434-14af-468a-8b55-5fc58957e7b6" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1770.057364] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Updating instance_info_cache with network_info: [{"id": "54ba8737-1a74-4d7e-b23b-2c8d2dca3c58", "address": "fa:16:3e:eb:cd:39", "network": {"id": "d0177494-e3d0-4799-ab18-6499f0c41488", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-812838737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b70942ca8c3d422a8d7740aad1324a6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54ba8737-1a", "ovs_interfaceid": "54ba8737-1a74-4d7e-b23b-2c8d2dca3c58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1770.069620] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Releasing lock "refresh_cache-39471434-14af-468a-8b55-5fc58957e7b6" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1770.069853] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Instance network_info: 
|[{"id": "54ba8737-1a74-4d7e-b23b-2c8d2dca3c58", "address": "fa:16:3e:eb:cd:39", "network": {"id": "d0177494-e3d0-4799-ab18-6499f0c41488", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-812838737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b70942ca8c3d422a8d7740aad1324a6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54ba8737-1a", "ovs_interfaceid": "54ba8737-1a74-4d7e-b23b-2c8d2dca3c58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1770.070155] env[62507]: DEBUG oslo_concurrency.lockutils [req-28faad80-7a18-4910-bcfd-22ce2321d2ba req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] Acquired lock "refresh_cache-39471434-14af-468a-8b55-5fc58957e7b6" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1770.070341] env[62507]: DEBUG nova.network.neutron [req-28faad80-7a18-4910-bcfd-22ce2321d2ba req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Refreshing network info cache for port 54ba8737-1a74-4d7e-b23b-2c8d2dca3c58 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1770.071752] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:cd:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54ba8737-1a74-4d7e-b23b-2c8d2dca3c58', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1770.078712] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Creating folder: Project (b70942ca8c3d422a8d7740aad1324a6b). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1770.081761] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cebcf72c-2468-416b-9a20-494b052e872c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.095701] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Created folder: Project (b70942ca8c3d422a8d7740aad1324a6b) in parent group-v497991. 
[ 1770.095881] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Creating folder: Instances. Parent ref: group-v498094. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1770.096145] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0069664-6b61-4b6c-982c-c8b6a821894b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.104673] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Created folder: Instances in parent group-v498094. [ 1770.104893] env[62507]: DEBUG oslo.service.loopingcall [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1770.105079] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1770.105276] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b57376f1-ffea-4d51-9afb-0089cde867a1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.126155] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1770.126155] env[62507]: value = "task-2460099" [ 1770.126155] env[62507]: _type = "Task" [ 1770.126155] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.133298] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460099, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.343500] env[62507]: DEBUG nova.network.neutron [req-28faad80-7a18-4910-bcfd-22ce2321d2ba req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Updated VIF entry in instance network info cache for port 54ba8737-1a74-4d7e-b23b-2c8d2dca3c58. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1770.343854] env[62507]: DEBUG nova.network.neutron [req-28faad80-7a18-4910-bcfd-22ce2321d2ba req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Updating instance_info_cache with network_info: [{"id": "54ba8737-1a74-4d7e-b23b-2c8d2dca3c58", "address": "fa:16:3e:eb:cd:39", "network": {"id": "d0177494-e3d0-4799-ab18-6499f0c41488", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-812838737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b70942ca8c3d422a8d7740aad1324a6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54ba8737-1a", "ovs_interfaceid": "54ba8737-1a74-4d7e-b23b-2c8d2dca3c58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1770.353830] env[62507]: DEBUG oslo_concurrency.lockutils [req-28faad80-7a18-4910-bcfd-22ce2321d2ba req-53bac07d-dfbe-4bd5-adb7-7087ad37fe40 service nova] Releasing lock "refresh_cache-39471434-14af-468a-8b55-5fc58957e7b6" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1770.635699] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460099, 'name': CreateVM_Task, 'duration_secs': 0.313064} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.635699] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1770.636243] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1770.636408] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1770.636791] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1770.636978] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b33d38c-41e6-4b4e-9155-8c368ff132c9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.641469] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for the task: (returnval){ [ 1770.641469] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52be31a3-e381-5825-fbd1-a983e9e42e92" [ 1770.641469] env[62507]: _type = "Task" [ 1770.641469] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.648530] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52be31a3-e381-5825-fbd1-a983e9e42e92, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.151778] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1771.152143] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1771.152466] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.203779] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1775.204163] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1775.204163] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1775.204248] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1775.227632] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1775.227813] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1775.227908] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1775.228059] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1775.228212] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1775.228338] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1775.228458] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1775.228577] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1775.228695] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1775.228812] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1775.228973] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1775.229491] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.167598] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.167871] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.167993] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1778.167621] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1778.167870] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1778.168017] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1779.343293] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "425b5171-97c2-4700-ad5f-c79aadb39eae" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.343727] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "425b5171-97c2-4700-ad5f-c79aadb39eae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.167764] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1780.179787] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.180011] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.180186] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.180353] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1780.181475] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a9a15d-88f9-4d99-9ac9-447eed0bb49f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.190656] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8784114-9a64-47e9-befd-306c3930c990 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.205712] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba410b7d-6e37-443c-9037-0288b28aad07 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.211910] env[62507]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f2670a-7cbb-4fe1-8793-52bfa219b118 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.240011] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181159MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1780.240164] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.240351] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.309717] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.309872] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01043570-d72d-4a97-8c51-cfe30b25b82b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.310010] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef5633ea-273d-429f-9a02-326711b73bab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.310146] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7ff089f8-f304-4c2e-bf3d-16997fe8968c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.310268] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 637de77e-d142-45ca-8a4e-3bf365e31502 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.310387] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.310505] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f257db53-3c5f-4dfc-bd45-9f2b27b49401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.310624] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.310740] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1c56d4af-ba43-4141-86d6-880ff384041e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.310856] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 39471434-14af-468a-8b55-5fc58957e7b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.322247] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 16295fdd-45d6-492f-99d9-1006ec42c097 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1780.332800] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a76d0987-29c2-423b-972c-990639986d5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1780.343035] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 56c176ec-c6e5-4f48-a5be-badef25c5667 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1780.352777] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c5d66c7b-54e4-4a5e-8207-0cadce10c4df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1780.366157] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 425b5171-97c2-4700-ad5f-c79aadb39eae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1780.366388] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1780.366535] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1780.537108] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4baaaa4-95d2-40ad-9746-cda7bac2373d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.545838] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c805edb2-326b-4875-8034-a5902d3b723f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.576241] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65ca24d-3980-451f-bc2b-0f5d0575916b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.583603] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061a0263-6553-46d8-9578-602187449a67 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.597460] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1780.606319] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1780.622543] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1780.622729] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.382s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.863830] env[62507]: DEBUG oslo_concurrency.lockutils [None req-1ee0b396-e24e-4310-b242-5ef775c39ac9 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "1c56d4af-ba43-4141-86d6-880ff384041e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.197226] env[62507]: WARNING oslo_vmware.rw_handles [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1817.197226] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1817.197226] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1817.197226] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1817.197226] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1817.197226] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1817.197226] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1817.197226] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1817.197226] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1817.197226] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1817.197226] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1817.197226] env[62507]: ERROR oslo_vmware.rw_handles [ 1817.197864] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/770b9ca6-7453-4f6a-a541-3edc29a572d9/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1817.200501] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 
tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1817.200778] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Copying Virtual Disk [datastore2] vmware_temp/770b9ca6-7453-4f6a-a541-3edc29a572d9/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/770b9ca6-7453-4f6a-a541-3edc29a572d9/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1817.201088] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb5e2400-b169-4de6-8dbf-0969e569a371 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.209268] env[62507]: DEBUG oslo_vmware.api [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for the task: (returnval){ [ 1817.209268] env[62507]: value = "task-2460100" [ 1817.209268] env[62507]: _type = "Task" [ 1817.209268] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.218413] env[62507]: DEBUG oslo_vmware.api [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Task: {'id': task-2460100, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.719114] env[62507]: DEBUG oslo_vmware.exceptions [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Fault InvalidArgument not matched. 
[ 1817.719114] env[62507]: DEBUG oslo_vmware.exceptions [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1817.719447] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1817.720071] env[62507]: ERROR nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1817.720071] env[62507]: Faults: ['InvalidArgument']
[ 1817.720071] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Traceback (most recent call last):
[ 1817.720071] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1817.720071] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] yield resources
[ 1817.720071] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1817.720071] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] self.driver.spawn(context, instance, image_meta,
[ 1817.720071] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1817.720071] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1817.720071] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1817.720071] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] self._fetch_image_if_missing(context, vi)
[ 1817.720071] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1817.720071] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] image_cache(vi, tmp_image_ds_loc)
[ 1817.720489] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1817.720489] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] vm_util.copy_virtual_disk(
[ 1817.720489] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1817.720489] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] session._wait_for_task(vmdk_copy_task)
[ 1817.720489] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1817.720489] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] return self.wait_for_task(task_ref)
[ 1817.720489] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1817.720489] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] return evt.wait()
[ 1817.720489] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1817.720489] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] result = hub.switch()
[ 1817.720489] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1817.720489] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] return self.greenlet.switch()
[ 1817.720489] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1817.720859] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] self.f(*self.args, **self.kw)
[ 1817.720859] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1817.720859] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] raise exceptions.translate_fault(task_info.error)
[ 1817.720859] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1817.720859] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Faults: ['InvalidArgument']
[ 1817.720859] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67]
[ 1817.720859] env[62507]: INFO nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Terminating instance
[ 1817.722099] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1817.722367] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
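Editor's note: "Fault InvalidArgument not matched." is the fault-translation step visible in the traceback above: get_fault_class looks the fault name up in a registry of specific exception classes, and anything unknown falls back to the generic VimFaultException carrying the Faults list. A hedged sketch of that shape (the registry contents and class layout are assumptions):

    class VimFaultException(Exception):
        # Generic carrier used when no specific class matches.
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    FAULT_CLASSES: dict[str, type] = {}  # e.g. 'FileAlreadyExists' -> a subclass

    def translate_fault(fault_name: str, message: str) -> Exception:
        cls = FAULT_CLASSES.get(fault_name)
        if cls is None:
            # "Fault InvalidArgument not matched." -> generic exception
            # with Faults: ['InvalidArgument'] attached, as logged above.
            return VimFaultException([fault_name], message)
        return cls(message)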
[ 1817.722558] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a54939eb-0c0c-4ee7-ab53-c246068fd487 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1817.724963] env[62507]: DEBUG nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1817.725476] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1817.725892] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb4b354-908a-44ae-9cee-5e16ee986dd9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1817.732672] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1817.732918] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-efaf8e8e-bd08-41d6-bd8f-4ed2b34c0863 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1817.735135] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1817.735312] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1817.736239] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cd22efd-68fd-4eff-bd55-5bdeafe37f03 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1817.742163] env[62507]: DEBUG oslo_vmware.api [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Waiting for the task: (returnval){
[ 1817.742163] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e92841-efe7-fe0b-26dd-e7bc85fae2a0"
[ 1817.742163] env[62507]: _type = "Task"
[ 1817.742163] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
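Editor's note: the mkdir / "Folder ... created." pair above is an idempotent create: the shared image cache directory may already exist or be created concurrently by another request, so an "already exists" failure is not an error. A tiny sketch of the pattern with a hypothetical mkdir callable:

    def create_folder_if_missing(mkdir, path):
        # mkdir raises FileExistsError when the folder is already there;
        # swallowing it lets concurrent spawns race safely on the shared
        # devstack-image-cache_base directory.
        try:
            mkdir(path)
        except FileExistsError:
            pass  # another request won the race; the folder exists either way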
[ 1817.748886] env[62507]: DEBUG oslo_vmware.api [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e92841-efe7-fe0b-26dd-e7bc85fae2a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1817.806271] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1817.806479] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1817.806661] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Deleting the datastore file [datastore2] 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1817.806923] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0cc7b1a-e4d7-473b-8a55-4e17c2d7251c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1817.814035] env[62507]: DEBUG oslo_vmware.api [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for the task: (returnval){
[ 1817.814035] env[62507]: value = "task-2460102"
[ 1817.814035] env[62507]: _type = "Task"
[ 1817.814035] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1817.821897] env[62507]: DEBUG oslo_vmware.api [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Task: {'id': task-2460102, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1818.252817] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1818.253116] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Creating directory with path [datastore2] vmware_temp/90942d69-78c1-4b4d-bbc6-44816220d70b/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1818.253316] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11f8b018-9b25-4499-a1f1-f2668f3686c0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1818.264426] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Created directory with path [datastore2] vmware_temp/90942d69-78c1-4b4d-bbc6-44816220d70b/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1818.264618] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Fetch image to [datastore2] vmware_temp/90942d69-78c1-4b4d-bbc6-44816220d70b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1818.264791] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/90942d69-78c1-4b4d-bbc6-44816220d70b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1818.265498] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7187384-7348-4918-a3f3-cbb6bebb8c32 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1818.271930] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f730e561-230b-46b3-afce-220a86d903f1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1818.280766] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7583226d-8211-45df-b3da-03746a1daa3c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
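Editor's note: the "Preparing fetch location" / "Fetch image to ... tmp-sparse.vmdk" records above are the image-cache flow: download once per datastore under a temporary name, then publish under the image id so concurrent spawns never see a half-written file (in this log the publish step is the CopyVirtualDisk_Task seen earlier, not a local rename). A local-filesystem sketch of the same idea, with download as a hypothetical callable(image_id, dest_path):

    import os

    def fetch_image_if_missing(cache_dir, image_id, download):
        cached = os.path.join(cache_dir, image_id, image_id + '.vmdk')
        if os.path.exists(cached):
            return cached                       # cache hit: nothing to fetch
        tmp = os.path.join(cache_dir, image_id, 'tmp-sparse.vmdk')
        os.makedirs(os.path.dirname(tmp), exist_ok=True)
        download(image_id, tmp)                 # write under the tmp name
        os.replace(tmp, cached)                 # publish atomically
        return cached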
[ 1818.310687] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae105e03-88be-49e4-86be-945c9d349786 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1818.319235] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c3867e1f-8ff3-49bc-b24e-42368bbb83b2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1818.323589] env[62507]: DEBUG oslo_vmware.api [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Task: {'id': task-2460102, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080946} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1818.324129] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1818.324344] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1818.324507] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1818.324682] env[62507]: INFO nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Took 0.60 seconds to destroy the instance on the hypervisor.
[ 1818.327397] env[62507]: DEBUG nova.compute.claims [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1818.327583] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1818.327965] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1818.342753] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1818.401228] env[62507]: DEBUG oslo_vmware.rw_handles [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/90942d69-78c1-4b4d-bbc6-44816220d70b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1818.461782] env[62507]: DEBUG oslo_vmware.rw_handles [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
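Editor's note: "Creating HTTP connection to write to file with size = 21318656 and URL = ..." is the write side of the image fetch: a streaming PUT of a known byte count to the datastore folder URL, fed chunk by chunk from the image iterator until "Completed reading data from the image iterator." A rough standard-library sketch (the host, path, and unverified TLS context are illustrative assumptions, not the library's exact behavior):

    import http.client
    import ssl

    def upload_stream(host, path, data_iter, size):
        conn = http.client.HTTPSConnection(
            host, 443, context=ssl._create_unverified_context())
        conn.putrequest('PUT', path)
        conn.putheader('Content-Length', str(size))
        conn.endheaders()
        for chunk in data_iter:   # exhausting the iterator is the
            conn.send(chunk)      # "Completed reading data" record
        return conn               # caller closes it; see the close sketch above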
[ 1818.461975] env[62507]: DEBUG oslo_vmware.rw_handles [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/90942d69-78c1-4b4d-bbc6-44816220d70b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1818.598073] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d35eb58-231c-4fbb-92ee-78044ca6726d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1818.606624] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940e39d2-2cfe-4b3e-b22e-b5a820514d37 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1818.635240] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b69b01-5c87-4beb-81ae-e164f6697452 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1818.641842] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ac429d-5580-429d-903a-c80df25d3122 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1818.654406] env[62507]: DEBUG nova.compute.provider_tree [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1818.663045] env[62507]: DEBUG nova.scheduler.client.report [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1818.677578] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.350s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1818.678109] env[62507]: ERROR nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1818.678109] env[62507]: Faults: ['InvalidArgument']
[ 1818.678109] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Traceback (most recent call last):
[ 1818.678109] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1818.678109] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] self.driver.spawn(context, instance, image_meta,
[ 1818.678109] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1818.678109] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1818.678109] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1818.678109] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] self._fetch_image_if_missing(context, vi)
[ 1818.678109] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1818.678109] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] image_cache(vi, tmp_image_ds_loc)
[ 1818.678109] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1818.678525] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] vm_util.copy_virtual_disk(
[ 1818.678525] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1818.678525] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] session._wait_for_task(vmdk_copy_task)
[ 1818.678525] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1818.678525] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] return self.wait_for_task(task_ref)
[ 1818.678525] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1818.678525] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] return evt.wait()
[ 1818.678525] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1818.678525] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] result = hub.switch()
[ 1818.678525] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1818.678525] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] return self.greenlet.switch()
[ 1818.678525] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1818.678525] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] self.f(*self.args, **self.kw)
[ 1818.678935] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1818.678935] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] raise exceptions.translate_fault(task_info.error)
[ 1818.678935] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1818.678935] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Faults: ['InvalidArgument']
[ 1818.678935] env[62507]: ERROR nova.compute.manager [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67]
[ 1818.678935] env[62507]: DEBUG nova.compute.utils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1818.680166] env[62507]: DEBUG nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Build of instance 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 was re-scheduled: A specified parameter was not correct: fileType
[ 1818.680166] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1818.680541] env[62507]: DEBUG nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1818.680719] env[62507]: DEBUG nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1818.680891] env[62507]: DEBUG nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1818.681072] env[62507]: DEBUG nova.network.neutron [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1818.976234] env[62507]: DEBUG nova.network.neutron [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1818.991953] env[62507]: INFO nova.compute.manager [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Took 0.31 seconds to deallocate network for instance.
[ 1819.088257] env[62507]: INFO nova.scheduler.client.report [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Deleted allocations for instance 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67
[ 1819.110191] env[62507]: DEBUG oslo_concurrency.lockutils [None req-361814bb-6796-47c3-9eb8-b617a176d865 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 624.751s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1819.111268] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ac07952-dfdb-42cc-820c-5de0a5c74fbf tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 429.107s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1819.111505] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ac07952-dfdb-42cc-820c-5de0a5c74fbf tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1819.111714] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ac07952-dfdb-42cc-820c-5de0a5c74fbf tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
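Editor's note: the held 624.751s / waited 429.107s pair above is per-instance serialization. The build path and the terminate path both run under a lock named after the instance UUID, so the delete request queued until the re-scheduled build let go. oslo.concurrency expresses this as a decorator; a minimal sketch (the function body is illustrative):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67')
    def do_terminate_instance():
        # Runs only once the build path releases the same named lock,
        # which is why the log shows the long wait before "Terminating
        # instance" appears.
        pass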
[ 1819.111887] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ac07952-dfdb-42cc-820c-5de0a5c74fbf tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1819.113952] env[62507]: INFO nova.compute.manager [None req-7ac07952-dfdb-42cc-820c-5de0a5c74fbf tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Terminating instance
[ 1819.115616] env[62507]: DEBUG nova.compute.manager [None req-7ac07952-dfdb-42cc-820c-5de0a5c74fbf tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1819.115815] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7ac07952-dfdb-42cc-820c-5de0a5c74fbf tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1819.116596] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-227ac736-868c-4252-86b1-11b4cd5878b9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1819.126262] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b798d60-9582-46ef-96e4-2c8fb53fb217 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1819.140034] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1819.162022] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-7ac07952-dfdb-42cc-820c-5de0a5c74fbf tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67 could not be found.
[ 1819.162022] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7ac07952-dfdb-42cc-820c-5de0a5c74fbf tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1819.162022] env[62507]: INFO nova.compute.manager [None req-7ac07952-dfdb-42cc-820c-5de0a5c74fbf tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Took 0.04 seconds to destroy the instance on the hypervisor.
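Editor's note: the destroy above tolerates a backend miss: the earlier destroy already removed the VM, so this pass finds nothing and still counts as success ("Instance does not exist on backend ... Instance destroyed"). A sketch of that idempotent shape with hypothetical helpers:

    class InstanceNotFound(Exception):
        pass  # stand-in for nova.exception.InstanceNotFound

    def destroy_instance(find_vm, unregister, uuid):
        try:
            vm_ref = find_vm(uuid)   # e.g. the SearchIndex.FindAllByUuid call
        except InstanceNotFound:
            # Already gone: log a warning and treat the destroy as done,
            # so repeated deletes cannot wedge the instance record.
            return
        unregister(vm_ref)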
[ 1819.162022] env[62507]: DEBUG oslo.service.loopingcall [None req-7ac07952-dfdb-42cc-820c-5de0a5c74fbf tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1819.162022] env[62507]: DEBUG nova.compute.manager [-] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1819.162467] env[62507]: DEBUG nova.network.neutron [-] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1819.184786] env[62507]: DEBUG nova.network.neutron [-] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1819.191730] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1819.191996] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1819.193904] env[62507]: INFO nova.compute.claims [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1819.196875] env[62507]: INFO nova.compute.manager [-] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] Took 0.04 seconds to deallocate network for instance.
[ 1819.288898] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ac07952-dfdb-42cc-820c-5de0a5c74fbf tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.178s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1819.289996] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 82.928s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1819.290211] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1819.290392] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "4aa3d23e-bb5e-4eeb-ab10-dd708c15ed67" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1819.410511] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322f8600-4349-434d-9a83-22269ce64372 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1819.418072] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b415d06c-dfc8-41a3-aa03-b5e44285545c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1819.448205] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26089bd0-74f7-4e03-9698-9338ecb1b945 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1819.455077] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12d8c84-01db-4f53-938d-210f7d2ea626 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1819.469532] env[62507]: DEBUG nova.compute.provider_tree [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1819.478323] env[62507]: DEBUG nova.scheduler.client.report [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1819.492177] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.300s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
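Editor's note: "Inventory has not changed for provider ... based on inventory data: {...}" is a change-detection step: the resource tracker only pushes inventory to placement when the freshly generated record differs from what placement already holds. A sketch of the comparison, with default-filling as an assumed normalization so an omitted field and its default value do not read as a change:

    DEFAULTS = {'reserved': 0, 'min_unit': 1, 'step_size': 1,
                'allocation_ratio': 1.0}

    def inventory_changed(reported, current):
        def norm(inv):
            return {rc: {**DEFAULTS, **fields} for rc, fields in inv.items()}
        return norm(reported) != norm(current)

    # False -> skip the placement update, as the log does here.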
[ 1819.492657] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1819.525885] env[62507]: DEBUG nova.compute.utils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1819.528031] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1819.528199] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1819.537134] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1819.590006] env[62507]: DEBUG nova.policy [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec30c348fdca4b22a99ab020dfb776f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b70942ca8c3d422a8d7740aad1324a6b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1819.596614] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1819.621074] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1819.621366] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1819.621552] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1819.621765] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1819.621928] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1819.622097] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1819.622313] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1819.622474] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1819.622642] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1819.622822] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1819.622996] env[62507]: DEBUG nova.virt.hardware [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1819.623840] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76cdd4ad-c0de-438f-a247-12f73c22aed7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1819.631396] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342c674f-b40e-4975-a43f-00b238e7d906 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1819.891134] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Successfully created port: 0c4acd0c-e804-4711-bcab-1aa17a59513b {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1820.821243] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Successfully updated port: 0c4acd0c-e804-4711-bcab-1aa17a59513b {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1820.834686] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "refresh_cache-16295fdd-45d6-492f-99d9-1006ec42c097" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1820.835231] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired lock "refresh_cache-16295fdd-45d6-492f-99d9-1006ec42c097" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1820.835506] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
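Editor's note: the nova.virt.hardware records above enumerate CPU topologies: with no flavor or image constraints (limits 0:0:0 fall back to 65536 per dimension), the driver builds every (sockets, cores, threads) factorization of the vCPU count, which for the 1-vCPU m1.nano flavor is exactly one. A runnable sketch of that enumeration:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Yield every triple whose product is the vCPU count and which
        # respects the per-dimension limits.
        for s, c, t in product(range(1, vcpus + 1), repeat=3):
            if (s * c * t == vcpus and s <= max_sockets
                    and c <= max_cores and t <= max_threads):
                yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the log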
[ 1820.882446] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1821.027791] env[62507]: DEBUG nova.compute.manager [req-38438c91-d6d1-4365-b8a6-8abbe43bdd52 req-867b0d82-750f-4783-a9d1-ece5d6010992 service nova] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Received event network-vif-plugged-0c4acd0c-e804-4711-bcab-1aa17a59513b {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1821.028030] env[62507]: DEBUG oslo_concurrency.lockutils [req-38438c91-d6d1-4365-b8a6-8abbe43bdd52 req-867b0d82-750f-4783-a9d1-ece5d6010992 service nova] Acquiring lock "16295fdd-45d6-492f-99d9-1006ec42c097-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1821.028243] env[62507]: DEBUG oslo_concurrency.lockutils [req-38438c91-d6d1-4365-b8a6-8abbe43bdd52 req-867b0d82-750f-4783-a9d1-ece5d6010992 service nova] Lock "16295fdd-45d6-492f-99d9-1006ec42c097-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1821.028414] env[62507]: DEBUG oslo_concurrency.lockutils [req-38438c91-d6d1-4365-b8a6-8abbe43bdd52 req-867b0d82-750f-4783-a9d1-ece5d6010992 service nova] Lock "16295fdd-45d6-492f-99d9-1006ec42c097-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1821.028583] env[62507]: DEBUG nova.compute.manager [req-38438c91-d6d1-4365-b8a6-8abbe43bdd52 req-867b0d82-750f-4783-a9d1-ece5d6010992 service nova] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] No waiting events found dispatching network-vif-plugged-0c4acd0c-e804-4711-bcab-1aa17a59513b {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1821.028748] env[62507]: WARNING nova.compute.manager [req-38438c91-d6d1-4365-b8a6-8abbe43bdd52 req-867b0d82-750f-4783-a9d1-ece5d6010992 service nova] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Received unexpected event network-vif-plugged-0c4acd0c-e804-4711-bcab-1aa17a59513b for instance with vm_state building and task_state spawning.
[ 1821.028910] env[62507]: DEBUG nova.compute.manager [req-38438c91-d6d1-4365-b8a6-8abbe43bdd52 req-867b0d82-750f-4783-a9d1-ece5d6010992 service nova] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Received event network-changed-0c4acd0c-e804-4711-bcab-1aa17a59513b {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
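Editor's note: the event records above are the external-event handshake with Neutron: a waiter must register for network-vif-plugged-<port> before the event arrives; here nothing was registered yet, so the event is popped against an empty table, reported as unexpected, and dropped. A compact sketch of the pop-under-lock pattern (class and method names are illustrative):

    import collections
    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters = collections.defaultdict(dict)
            self._lock = threading.Lock()  # the "<uuid>-events" lock in the log

        def pop_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters[instance_uuid].pop(event_name, None)

    events = InstanceEvents()
    if events.pop_event('16295fdd-45d6-492f-99d9-1006ec42c097',
                        'network-vif-plugged-0c4acd0c-e804-4711-bcab-1aa17a59513b') is None:
        print('No waiting events found dispatching network-vif-plugged')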
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1821.029313] env[62507]: DEBUG oslo_concurrency.lockutils [req-38438c91-d6d1-4365-b8a6-8abbe43bdd52 req-867b0d82-750f-4783-a9d1-ece5d6010992 service nova] Acquiring lock "refresh_cache-16295fdd-45d6-492f-99d9-1006ec42c097" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.052051] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Updating instance_info_cache with network_info: [{"id": "0c4acd0c-e804-4711-bcab-1aa17a59513b", "address": "fa:16:3e:00:8a:4c", "network": {"id": "d0177494-e3d0-4799-ab18-6499f0c41488", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-812838737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b70942ca8c3d422a8d7740aad1324a6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c4acd0c-e8", "ovs_interfaceid": "0c4acd0c-e804-4711-bcab-1aa17a59513b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1821.062469] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Releasing lock "refresh_cache-16295fdd-45d6-492f-99d9-1006ec42c097" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.062803] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Instance network_info: |[{"id": "0c4acd0c-e804-4711-bcab-1aa17a59513b", "address": "fa:16:3e:00:8a:4c", "network": {"id": "d0177494-e3d0-4799-ab18-6499f0c41488", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-812838737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b70942ca8c3d422a8d7740aad1324a6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c4acd0c-e8", "ovs_interfaceid": "0c4acd0c-e804-4711-bcab-1aa17a59513b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1821.063159] env[62507]: DEBUG oslo_concurrency.lockutils [req-38438c91-d6d1-4365-b8a6-8abbe43bdd52 req-867b0d82-750f-4783-a9d1-ece5d6010992 service nova] Acquired lock "refresh_cache-16295fdd-45d6-492f-99d9-1006ec42c097" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.063400] env[62507]: DEBUG nova.network.neutron [req-38438c91-d6d1-4365-b8a6-8abbe43bdd52 req-867b0d82-750f-4783-a9d1-ece5d6010992 service nova] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Refreshing network info cache for port 0c4acd0c-e804-4711-bcab-1aa17a59513b {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1821.064459] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:8a:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0c4acd0c-e804-4711-bcab-1aa17a59513b', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1821.072347] env[62507]: DEBUG oslo.service.loopingcall [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1821.075179] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1821.075669] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f924bb7-7f44-4d53-8506-10eb4edff886 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.096888] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1821.096888] env[62507]: value = "task-2460103" [ 1821.096888] env[62507]: _type = "Task" [ 1821.096888] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.104782] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460103, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.360413] env[62507]: DEBUG nova.network.neutron [req-38438c91-d6d1-4365-b8a6-8abbe43bdd52 req-867b0d82-750f-4783-a9d1-ece5d6010992 service nova] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Updated VIF entry in instance network info cache for port 0c4acd0c-e804-4711-bcab-1aa17a59513b. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1821.360704] env[62507]: DEBUG nova.network.neutron [req-38438c91-d6d1-4365-b8a6-8abbe43bdd52 req-867b0d82-750f-4783-a9d1-ece5d6010992 service nova] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Updating instance_info_cache with network_info: [{"id": "0c4acd0c-e804-4711-bcab-1aa17a59513b", "address": "fa:16:3e:00:8a:4c", "network": {"id": "d0177494-e3d0-4799-ab18-6499f0c41488", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-812838737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b70942ca8c3d422a8d7740aad1324a6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c4acd0c-e8", "ovs_interfaceid": "0c4acd0c-e804-4711-bcab-1aa17a59513b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1821.371017] env[62507]: DEBUG oslo_concurrency.lockutils [req-38438c91-d6d1-4365-b8a6-8abbe43bdd52 req-867b0d82-750f-4783-a9d1-ece5d6010992 service nova] Releasing lock "refresh_cache-16295fdd-45d6-492f-99d9-1006ec42c097" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.607855] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460103, 'name': CreateVM_Task, 'duration_secs': 0.365442} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.608011] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1821.614566] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.614737] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.615055] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1821.615287] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7d6bc32-10b0-49f8-be13-94636003adfe {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.619256] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for the task: (returnval){ [ 1821.619256] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52d46df7-b84b-1d6d-55d7-aa0a5ccfaf5b" [ 1821.619256] env[62507]: _type = "Task" [ 1821.619256] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.626297] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52d46df7-b84b-1d6d-55d7-aa0a5ccfaf5b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.129886] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.130349] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1822.130349] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.622978] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1837.164095] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1837.166717] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1837.166878] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1837.166998] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1837.193096] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1837.193273] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1837.193411] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1837.193541] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1837.193669] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1837.193794] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1837.193916] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1837.194052] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1837.194179] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1837.194300] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1837.194456] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1837.194953] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1837.195139] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.168029] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.191094] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.191284] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.191468] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.191616] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1842.168346] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1842.179855] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.180080] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.180243] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.180399] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1842.181559] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4166179-15f3-4050-8511-28bd7925eaeb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.190145] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd0ff8f-4061-49b8-9e69-14091cd1df02 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.203664] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c4963a-b45c-4c2b-a9fe-293a9a403e7a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.209538] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54c8820-088b-45ac-8811-bace927d0304 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.239193] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181160MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1842.239318] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1842.239487] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.309670] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 01043570-d72d-4a97-8c51-cfe30b25b82b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1842.309832] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef5633ea-273d-429f-9a02-326711b73bab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1842.309958] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7ff089f8-f304-4c2e-bf3d-16997fe8968c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1842.310094] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 637de77e-d142-45ca-8a4e-3bf365e31502 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1842.310217] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1842.310336] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f257db53-3c5f-4dfc-bd45-9f2b27b49401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1842.310453] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1842.310574] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1c56d4af-ba43-4141-86d6-880ff384041e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1842.310691] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 39471434-14af-468a-8b55-5fc58957e7b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1842.310805] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 16295fdd-45d6-492f-99d9-1006ec42c097 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1842.321256] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a76d0987-29c2-423b-972c-990639986d5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1842.331511] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 56c176ec-c6e5-4f48-a5be-badef25c5667 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1842.340892] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c5d66c7b-54e4-4a5e-8207-0cadce10c4df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1842.350251] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 425b5171-97c2-4700-ad5f-c79aadb39eae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1842.350473] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1842.350623] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1842.512024] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad698fd-ab09-400e-a8c7-0604996d1fa1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.519651] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b7585f-5f96-4ed8-8a2e-c0b7bb6edec8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.549228] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926d685e-7881-4bef-aa50-709cf3e7deda {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.556446] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9e1bda-415f-4366-bab2-7e398a9d960f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.570147] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1842.578294] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1842.592766] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1842.592943] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.353s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.893997] env[62507]: WARNING oslo_vmware.rw_handles [None 
req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1865.893997] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1865.893997] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1865.893997] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1865.893997] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1865.893997] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1865.893997] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1865.893997] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1865.893997] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1865.893997] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1865.893997] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1865.893997] env[62507]: ERROR oslo_vmware.rw_handles [ 1865.894720] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/90942d69-78c1-4b4d-bbc6-44816220d70b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1865.896532] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1865.896801] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Copying Virtual Disk [datastore2] vmware_temp/90942d69-78c1-4b4d-bbc6-44816220d70b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/90942d69-78c1-4b4d-bbc6-44816220d70b/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1865.897104] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0779615f-eb00-4e27-b34f-10a8380b0cd0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.905816] env[62507]: DEBUG oslo_vmware.api [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Waiting for the task: (returnval){ [ 1865.905816] env[62507]: value = "task-2460104" [ 1865.905816] env[62507]: _type = "Task" [ 1865.905816] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.913306] env[62507]: DEBUG oslo_vmware.api [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Task: {'id': task-2460104, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.416425] env[62507]: DEBUG oslo_vmware.exceptions [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1866.416738] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.417303] env[62507]: ERROR nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1866.417303] env[62507]: Faults: ['InvalidArgument'] [ 1866.417303] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Traceback (most recent call last): [ 1866.417303] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1866.417303] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] yield resources [ 1866.417303] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1866.417303] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] self.driver.spawn(context, instance, image_meta, [ 1866.417303] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1866.417303] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1866.417303] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1866.417303] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] self._fetch_image_if_missing(context, vi) [ 1866.417303] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1866.417720] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] image_cache(vi, tmp_image_ds_loc) [ 1866.417720] env[62507]: ERROR nova.compute.manager [instance: 
01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1866.417720] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] vm_util.copy_virtual_disk( [ 1866.417720] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1866.417720] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] session._wait_for_task(vmdk_copy_task) [ 1866.417720] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1866.417720] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] return self.wait_for_task(task_ref) [ 1866.417720] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1866.417720] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] return evt.wait() [ 1866.417720] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1866.417720] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] result = hub.switch() [ 1866.417720] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1866.417720] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] return self.greenlet.switch() [ 1866.418198] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1866.418198] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] self.f(*self.args, **self.kw) [ 1866.418198] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1866.418198] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] raise exceptions.translate_fault(task_info.error) [ 1866.418198] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1866.418198] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Faults: ['InvalidArgument'] [ 1866.418198] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] [ 1866.418198] env[62507]: INFO nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Terminating instance [ 1866.419159] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.419380] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1866.419672] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-981c1280-d8b1-489d-8802-4f6d6ba29344 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.423218] env[62507]: DEBUG nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1866.423435] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1866.424201] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c413411-b503-4430-b8cf-de23ea6eae21 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.428094] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1866.428270] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1866.429272] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a44953d-5b31-4961-afe0-90e684f4f630 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.433227] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1866.433776] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a745296d-28cf-47bd-b888-bfeaa5bcb837 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.436272] env[62507]: DEBUG oslo_vmware.api [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for the task: (returnval){ [ 1866.436272] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e447d0-70b0-7dcc-51d3-39faffebeefd" [ 1866.436272] env[62507]: _type = "Task" [ 1866.436272] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.444439] env[62507]: DEBUG oslo_vmware.api [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e447d0-70b0-7dcc-51d3-39faffebeefd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.501171] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1866.501394] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1866.501626] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Deleting the datastore file [datastore2] 01043570-d72d-4a97-8c51-cfe30b25b82b {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1866.501920] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35e8436c-54ac-488c-b22c-48a34109506a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.508156] env[62507]: DEBUG oslo_vmware.api [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Waiting for the task: (returnval){ [ 1866.508156] env[62507]: value = "task-2460106" [ 1866.508156] env[62507]: _type = "Task" [ 1866.508156] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.515445] env[62507]: DEBUG oslo_vmware.api [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Task: {'id': task-2460106, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.947484] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1866.947839] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Creating directory with path [datastore2] vmware_temp/2d93dedf-225b-43e6-8ab5-bcbc6dc32a4f/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1866.947967] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85ee243d-90a3-4127-9240-3b2992ff02e8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.960239] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Created directory with path [datastore2] vmware_temp/2d93dedf-225b-43e6-8ab5-bcbc6dc32a4f/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1866.960421] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Fetch image to [datastore2] vmware_temp/2d93dedf-225b-43e6-8ab5-bcbc6dc32a4f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1866.960592] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/2d93dedf-225b-43e6-8ab5-bcbc6dc32a4f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1866.961311] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2369624-7da6-4572-a485-227f19a31513 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.967522] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b53fa42-86f0-4007-8293-1308fa6d7cc5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.976484] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03cc04f-024a-4b73-8f3a-d7c7d9c2b57c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.006968] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-02e5dc19-ad13-4025-a4fc-74efd6b4cd34 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.016997] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-68affb89-50b0-4efb-80f8-35894bac8180 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.018581] env[62507]: DEBUG oslo_vmware.api [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Task: {'id': task-2460106, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072721} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.018809] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1867.018980] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1867.019162] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1867.019336] env[62507]: INFO nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Took 0.60 seconds to destroy the instance on the hypervisor. 
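The task records above — CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task — all follow the same poll-until-terminal pattern visible in this log: the SOAP invocation returns a Task reference immediately, and wait_for_task/_poll_task in oslo_vmware/api.py then poll the task's info until it reaches a terminal state, emitting the "progress is N%." and "completed successfully" records seen here, or raising a translated fault (as with the VimFaultException earlier) when the task errors. A minimal sketch of that pattern — not oslo.vmware's actual implementation — assuming a hypothetical get_task_info() helper in place of the real PropertyCollector lookup:

import time

class TaskFaultError(Exception):
    """Stand-in for the translated fault raised on task error (sketch only)."""

def wait_for_task(task_ref, get_task_info, interval=0.5):
    # Poll the task until it reaches a terminal state, mirroring the
    # "progress is N%." records in the log above.
    while True:
        info = get_task_info(task_ref)   # hypothetical; e.g. {'state': 'running', 'progress': 40}
        if info['state'] == 'success':
            return info.get('result')    # the (returnval) of the completed task
        if info['state'] == 'error':
            raise TaskFaultError(info.get('error', 'unknown fault'))
        # 'queued' or 'running': report progress and poll again
        print("Task %s progress is %s%%." % (task_ref, info.get('progress', 0)))
        time.sleep(interval)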
[ 1867.021372] env[62507]: DEBUG nova.compute.claims [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1867.021555] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.021777] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.042597] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1867.168688] env[62507]: DEBUG oslo_vmware.rw_handles [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2d93dedf-225b-43e6-8ab5-bcbc6dc32a4f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1867.234317] env[62507]: DEBUG oslo_vmware.rw_handles [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1867.234534] env[62507]: DEBUG oslo_vmware.rw_handles [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2d93dedf-225b-43e6-8ab5-bcbc6dc32a4f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1867.285778] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4106481a-0209-4341-8bba-a267694c9421 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.293651] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06548d7a-34d9-459c-8efc-4426bf92df4f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.322986] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6680eb10-1158-45bc-b57f-297f4dc89c42 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.330481] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc66945e-6cbb-4ddc-8a9c-8e4bdb7a0ca9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.344684] env[62507]: DEBUG nova.compute.provider_tree [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1867.352909] env[62507]: DEBUG nova.scheduler.client.report [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1867.368251] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.346s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.368869] env[62507]: ERROR nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1867.368869] env[62507]: Faults: ['InvalidArgument'] [ 1867.368869] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Traceback (most recent call last): [ 1867.368869] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1867.368869] env[62507]: ERROR 
nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] self.driver.spawn(context, instance, image_meta, [ 1867.368869] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1867.368869] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1867.368869] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1867.368869] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] self._fetch_image_if_missing(context, vi) [ 1867.368869] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1867.368869] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] image_cache(vi, tmp_image_ds_loc) [ 1867.368869] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1867.369548] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] vm_util.copy_virtual_disk( [ 1867.369548] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1867.369548] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] session._wait_for_task(vmdk_copy_task) [ 1867.369548] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1867.369548] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] return self.wait_for_task(task_ref) [ 1867.369548] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1867.369548] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] return evt.wait() [ 1867.369548] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1867.369548] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] result = hub.switch() [ 1867.369548] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1867.369548] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] return self.greenlet.switch() [ 1867.369548] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1867.369548] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] self.f(*self.args, **self.kw) [ 1867.369973] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1867.369973] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] raise exceptions.translate_fault(task_info.error) [ 1867.369973] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1867.369973] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Faults: ['InvalidArgument'] [ 1867.369973] env[62507]: ERROR nova.compute.manager [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] [ 1867.369973] env[62507]: DEBUG nova.compute.utils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1867.370879] env[62507]: DEBUG nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Build of instance 01043570-d72d-4a97-8c51-cfe30b25b82b was re-scheduled: A specified parameter was not correct: fileType [ 1867.370879] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1867.371264] env[62507]: DEBUG nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1867.371439] env[62507]: DEBUG nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1867.371634] env[62507]: DEBUG nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1867.371805] env[62507]: DEBUG nova.network.neutron [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1867.799685] env[62507]: DEBUG nova.network.neutron [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.809729] env[62507]: INFO nova.compute.manager [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Took 0.44 seconds to deallocate network for instance. [ 1867.906491] env[62507]: INFO nova.scheduler.client.report [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Deleted allocations for instance 01043570-d72d-4a97-8c51-cfe30b25b82b [ 1867.927102] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dbfad42d-0457-4cc0-a12f-7c0e0220ca99 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Lock "01043570-d72d-4a97-8c51-cfe30b25b82b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 623.366s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.928194] env[62507]: DEBUG oslo_concurrency.lockutils [None req-128eb8c9-d425-4a01-b88e-b30fefc507d8 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Lock "01043570-d72d-4a97-8c51-cfe30b25b82b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 427.886s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.928416] env[62507]: DEBUG oslo_concurrency.lockutils [None req-128eb8c9-d425-4a01-b88e-b30fefc507d8 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Acquiring lock "01043570-d72d-4a97-8c51-cfe30b25b82b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.928623] env[62507]: DEBUG oslo_concurrency.lockutils [None req-128eb8c9-d425-4a01-b88e-b30fefc507d8 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Lock "01043570-d72d-4a97-8c51-cfe30b25b82b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.928790] env[62507]: DEBUG oslo_concurrency.lockutils [None req-128eb8c9-d425-4a01-b88e-b30fefc507d8 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Lock "01043570-d72d-4a97-8c51-cfe30b25b82b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.930777] env[62507]: INFO nova.compute.manager [None req-128eb8c9-d425-4a01-b88e-b30fefc507d8 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Terminating instance [ 1867.932495] env[62507]: DEBUG nova.compute.manager [None req-128eb8c9-d425-4a01-b88e-b30fefc507d8 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1867.932715] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-128eb8c9-d425-4a01-b88e-b30fefc507d8 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1867.933192] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5eba1511-e0ba-4034-aab0-8fe52c766c60 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.938512] env[62507]: DEBUG nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1867.945015] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac8ddee-1e35-4859-b6de-c7fa201c70d5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.975501] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-128eb8c9-d425-4a01-b88e-b30fefc507d8 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 01043570-d72d-4a97-8c51-cfe30b25b82b could not be found. [ 1867.975877] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-128eb8c9-d425-4a01-b88e-b30fefc507d8 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1867.975877] env[62507]: INFO nova.compute.manager [None req-128eb8c9-d425-4a01-b88e-b30fefc507d8 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Took 0.04 seconds to destroy the instance on the hypervisor. 
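Each lockutils entry in this log reports two intervals per critical section: the time the caller waited to acquire the named lock and the time the lock was held. The sketch below reproduces that bookkeeping with a plain threading.Lock; the timed_lock helper is hypothetical, standing in for the decorator oslo_concurrency.lockutils applies internally.

import threading
import time

_lock = threading.Lock()

def timed_lock(name, fn):
    # Record how long we waited to acquire and how long we held the lock,
    # like the lockutils "waited X / held Y" entries (illustrative only).
    t0 = time.monotonic()
    with _lock:
        waited = time.monotonic() - t0
        t1 = time.monotonic()
        try:
            return fn()
        finally:
            print('Lock "%s": waited %.3fs, held %.3fs'
                  % (name, waited, time.monotonic() - t1))

timed_lock("compute_resources", lambda: time.sleep(0.01))

The same pattern is what produces figures like "waited 427.886s" above, where the terminate request queued behind the long-running build lock on the same instance UUID.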
[ 1867.976158] env[62507]: DEBUG oslo.service.loopingcall [None req-128eb8c9-d425-4a01-b88e-b30fefc507d8 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1867.978391] env[62507]: DEBUG nova.compute.manager [-] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1867.978496] env[62507]: DEBUG nova.network.neutron [-] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1867.992721] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.992964] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.994444] env[62507]: INFO nova.compute.claims [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1868.003400] env[62507]: DEBUG nova.network.neutron [-] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.014200] env[62507]: INFO nova.compute.manager [-] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] Took 0.04 seconds to deallocate network for instance. [ 1868.098590] env[62507]: DEBUG oslo_concurrency.lockutils [None req-128eb8c9-d425-4a01-b88e-b30fefc507d8 tempest-ServerActionsTestJSON-1988473056 tempest-ServerActionsTestJSON-1988473056-project-member] Lock "01043570-d72d-4a97-8c51-cfe30b25b82b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.170s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.099889] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "01043570-d72d-4a97-8c51-cfe30b25b82b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 131.738s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.099889] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 01043570-d72d-4a97-8c51-cfe30b25b82b] During sync_power_state the instance has a pending task (deleting). Skip. 
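The instance_claim/abort entries surrounding this point reserve VCPU, MEMORY_MB, and DISK_GB against the node under the compute_resources lock before a build proceeds, and return the reservation when a build fails (as happened after the fileType fault earlier). A toy version of that accounting, with illustrative names and none of the real ResourceTracker's placement reporting:

class ToyTracker:
    """Toy resource accounting; the real ResourceTracker also reports to
    placement and persists compute-node state."""

    def __init__(self, vcpu, memory_mb, disk_gb):
        self.free = {"VCPU": vcpu, "MEMORY_MB": memory_mb, "DISK_GB": disk_gb}

    def claim(self, **want):
        # Fail the claim if any resource class would go negative.
        if any(self.free[k] < v for k, v in want.items()):
            raise RuntimeError("insufficient resources")
        for k, v in want.items():
            self.free[k] -= v
        return want

    def abort(self, claimed):
        # Roll the reservation back, as abort_instance_claim does after
        # a failed build such as the fileType fault above.
        for k, v in claimed.items():
            self.free[k] += v

tracker = ToyTracker(vcpu=48, memory_mb=196590, disk_gb=400)
claim = tracker.claim(VCPU=1, MEMORY_MB=128, DISK_GB=1)  # m1.nano-sized
tracker.abort(claim)
assert tracker.free == {"VCPU": 48, "MEMORY_MB": 196590, "DISK_GB": 400}

Claiming 1 VCPU / 128 MB / 1 GB matches the m1.nano allocations ({'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}) that the resource tracker lists per instance later in this log.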
[ 1868.099889] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "01043570-d72d-4a97-8c51-cfe30b25b82b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.191683] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77f5111-8d7d-4da1-bbca-51f08a97fedd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.200890] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e496a8c0-e2b2-4d12-9197-857df895fd79 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.231776] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9e7f0b-5b78-4d39-bcbc-fdefe60b2730 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.239855] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fafd2931-5cdb-488a-9394-15238d0d9f84 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.254118] env[62507]: DEBUG nova.compute.provider_tree [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1868.266029] env[62507]: DEBUG nova.scheduler.client.report [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1868.280321] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.287s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.280770] env[62507]: DEBUG nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Start building networks asynchronously for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1868.317149] env[62507]: DEBUG nova.compute.utils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1868.318501] env[62507]: DEBUG nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1868.318501] env[62507]: DEBUG nova.network.neutron [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1868.330800] env[62507]: DEBUG nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1868.381744] env[62507]: DEBUG nova.policy [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '751698c254a140919588ea005a5e586d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e8135bf41224c058bca7f453921f08c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1868.394133] env[62507]: DEBUG nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1868.418980] env[62507]: DEBUG nova.virt.hardware [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1868.419227] env[62507]: DEBUG nova.virt.hardware [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1868.419386] env[62507]: DEBUG nova.virt.hardware [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1868.419583] env[62507]: DEBUG nova.virt.hardware [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1868.419732] env[62507]: DEBUG nova.virt.hardware [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1868.419876] env[62507]: DEBUG nova.virt.hardware [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1868.420092] env[62507]: DEBUG nova.virt.hardware [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1868.420258] env[62507]: DEBUG nova.virt.hardware [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1868.420426] env[62507]: DEBUG 
nova.virt.hardware [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1868.420588] env[62507]: DEBUG nova.virt.hardware [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1868.420760] env[62507]: DEBUG nova.virt.hardware [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1868.421640] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fa110e-e4f0-4420-afcf-01477a22e1b9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.429578] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61fa2b39-268a-42df-8823-c859e0b1b0f0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.768224] env[62507]: DEBUG nova.network.neutron [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Successfully created port: ae3c40b4-5355-4793-9f58-458b5b18ce5b {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1869.644093] env[62507]: DEBUG nova.network.neutron [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Successfully updated port: ae3c40b4-5355-4793-9f58-458b5b18ce5b {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1869.655318] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "refresh_cache-a76d0987-29c2-423b-972c-990639986d5f" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.655495] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquired lock "refresh_cache-a76d0987-29c2-423b-972c-990639986d5f" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.655697] env[62507]: DEBUG nova.network.neutron [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1869.693855] env[62507]: DEBUG nova.network.neutron [None 
req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1869.869254] env[62507]: DEBUG nova.compute.manager [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] [instance: a76d0987-29c2-423b-972c-990639986d5f] Received event network-vif-plugged-ae3c40b4-5355-4793-9f58-458b5b18ce5b {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1869.869476] env[62507]: DEBUG oslo_concurrency.lockutils [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] Acquiring lock "a76d0987-29c2-423b-972c-990639986d5f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.870573] env[62507]: DEBUG oslo_concurrency.lockutils [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] Lock "a76d0987-29c2-423b-972c-990639986d5f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.870824] env[62507]: DEBUG oslo_concurrency.lockutils [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] Lock "a76d0987-29c2-423b-972c-990639986d5f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.871047] env[62507]: DEBUG nova.compute.manager [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] [instance: a76d0987-29c2-423b-972c-990639986d5f] No waiting events found dispatching network-vif-plugged-ae3c40b4-5355-4793-9f58-458b5b18ce5b {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1869.871274] env[62507]: WARNING nova.compute.manager [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] [instance: a76d0987-29c2-423b-972c-990639986d5f] Received unexpected event network-vif-plugged-ae3c40b4-5355-4793-9f58-458b5b18ce5b for instance with vm_state building and task_state spawning. [ 1869.871472] env[62507]: DEBUG nova.compute.manager [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] [instance: a76d0987-29c2-423b-972c-990639986d5f] Received event network-changed-ae3c40b4-5355-4793-9f58-458b5b18ce5b {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1869.871698] env[62507]: DEBUG nova.compute.manager [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] [instance: a76d0987-29c2-423b-972c-990639986d5f] Refreshing instance network info cache due to event network-changed-ae3c40b4-5355-4793-9f58-458b5b18ce5b. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1869.871903] env[62507]: DEBUG oslo_concurrency.lockutils [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] Acquiring lock "refresh_cache-a76d0987-29c2-423b-972c-990639986d5f" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.915750] env[62507]: DEBUG nova.network.neutron [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Updating instance_info_cache with network_info: [{"id": "ae3c40b4-5355-4793-9f58-458b5b18ce5b", "address": "fa:16:3e:ae:ae:80", "network": {"id": "b0f6036c-0ac1-413b-85a4-8e1a827463a0", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1523944788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e8135bf41224c058bca7f453921f08c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae3c40b4-53", "ovs_interfaceid": "ae3c40b4-5355-4793-9f58-458b5b18ce5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.929319] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Releasing lock "refresh_cache-a76d0987-29c2-423b-972c-990639986d5f" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.929912] env[62507]: DEBUG nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Instance network_info: |[{"id": "ae3c40b4-5355-4793-9f58-458b5b18ce5b", "address": "fa:16:3e:ae:ae:80", "network": {"id": "b0f6036c-0ac1-413b-85a4-8e1a827463a0", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1523944788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e8135bf41224c058bca7f453921f08c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapae3c40b4-53", "ovs_interfaceid": "ae3c40b4-5355-4793-9f58-458b5b18ce5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1869.929912] env[62507]: DEBUG oslo_concurrency.lockutils [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] Acquired lock "refresh_cache-a76d0987-29c2-423b-972c-990639986d5f" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.930109] env[62507]: DEBUG nova.network.neutron [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] [instance: a76d0987-29c2-423b-972c-990639986d5f] Refreshing network info cache for port ae3c40b4-5355-4793-9f58-458b5b18ce5b {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1869.932218] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:ae:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae3c40b4-5355-4793-9f58-458b5b18ce5b', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1869.940538] env[62507]: DEBUG oslo.service.loopingcall [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1869.943637] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a76d0987-29c2-423b-972c-990639986d5f] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1869.944406] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17c0e15a-bd61-4209-bbd9-fdf6ccec9168 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.965849] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1869.965849] env[62507]: value = "task-2460107" [ 1869.965849] env[62507]: _type = "Task" [ 1869.965849] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.973881] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460107, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.219511] env[62507]: DEBUG nova.network.neutron [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] [instance: a76d0987-29c2-423b-972c-990639986d5f] Updated VIF entry in instance network info cache for port ae3c40b4-5355-4793-9f58-458b5b18ce5b. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1870.220222] env[62507]: DEBUG nova.network.neutron [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] [instance: a76d0987-29c2-423b-972c-990639986d5f] Updating instance_info_cache with network_info: [{"id": "ae3c40b4-5355-4793-9f58-458b5b18ce5b", "address": "fa:16:3e:ae:ae:80", "network": {"id": "b0f6036c-0ac1-413b-85a4-8e1a827463a0", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1523944788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e8135bf41224c058bca7f453921f08c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae3c40b4-53", "ovs_interfaceid": "ae3c40b4-5355-4793-9f58-458b5b18ce5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.231152] env[62507]: DEBUG oslo_concurrency.lockutils [req-571e53d9-b62b-4f08-8d9d-adf7870d45ba req-f0248332-070e-43aa-8cf4-7c83fe15babd service nova] Releasing lock "refresh_cache-a76d0987-29c2-423b-972c-990639986d5f" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.476607] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460107, 'name': CreateVM_Task, 'duration_secs': 0.304699} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.476843] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a76d0987-29c2-423b-972c-990639986d5f] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1870.477510] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.477683] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.478037] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1870.478284] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5bedf37-104b-4e50-bc72-f82fbfd1a229 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.482440] env[62507]: DEBUG oslo_vmware.api [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for the task: (returnval){ [ 1870.482440] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52242fe4-99b7-c3b9-8590-190f24dbe1cf" [ 1870.482440] env[62507]: _type = "Task" [ 1870.482440] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.489703] env[62507]: DEBUG oslo_vmware.api [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52242fe4-99b7-c3b9-8590-190f24dbe1cf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.861863] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8908eeb2-c664-4d0d-b5f9-685c82890bab tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "16295fdd-45d6-492f-99d9-1006ec42c097" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.988412] env[62507]: DEBUG oslo_concurrency.lockutils [None req-da37e667-26f9-41bd-a6f5-4f8a8188e38e tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "39471434-14af-468a-8b55-5fc58957e7b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.994644] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.994858] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1870.995076] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.880419] env[62507]: DEBUG oslo_concurrency.lockutils [None req-91642030-79c8-4555-9615-873caac607dd tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "a76d0987-29c2-423b-972c-990639986d5f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.593493] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.163853] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1898.168891] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1898.168891] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1898.169416] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1898.190990] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1898.191230] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1898.191290] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1898.191419] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1898.191541] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1898.191663] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1898.191789] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1898.191908] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1898.192036] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1898.192158] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a76d0987-29c2-423b-972c-990639986d5f] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1898.192276] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
[ 1899.168059] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1899.168237] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1899.168503] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1899.168612] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1899.168659] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 1900.168767] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1902.168643] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1902.180297] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1902.180513] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1902.180681] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1902.180836] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1902.182051] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4be9c8e-c5e9-4f23-86ac-2b6acf4bb8b3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1902.191303] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401be07a-7f5d-40f3-8c05-a0e187813d32 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1902.206556] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae54462-79aa-4e12-accf-b92ce9555303 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1902.212858] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304b141d-0a50-4fc2-a89c-63144e646065 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1902.241906] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181173MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1902.242075] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1902.242278] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1902.314018] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance ef5633ea-273d-429f-9a02-326711b73bab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1902.314018] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7ff089f8-f304-4c2e-bf3d-16997fe8968c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1902.314185] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 637de77e-d142-45ca-8a4e-3bf365e31502 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1902.314280] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1902.314372] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f257db53-3c5f-4dfc-bd45-9f2b27b49401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1902.314489] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1902.314603] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1c56d4af-ba43-4141-86d6-880ff384041e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1902.314718] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 39471434-14af-468a-8b55-5fc58957e7b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1902.314831] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 16295fdd-45d6-492f-99d9-1006ec42c097 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1902.314943] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a76d0987-29c2-423b-972c-990639986d5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1902.327407] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 56c176ec-c6e5-4f48-a5be-badef25c5667 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1902.338000] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance c5d66c7b-54e4-4a5e-8207-0cadce10c4df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1902.347833] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 425b5171-97c2-4700-ad5f-c79aadb39eae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1902.348082] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1902.348239] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1902.488533] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8efffe-2394-4681-bc12-58632e6bc3ed {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1902.496502] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061e0cf6-78d4-4c8c-a365-a0d3a64508d1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1902.525802] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c58217-a8fc-4cc1-aaee-01518f254de0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1902.532366] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c571a0-02ac-4e9a-ad4f-eac20bcc7d80 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1902.545482] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1902.554040] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1902.567388] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1902.567534] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.325s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1917.230623] env[62507]: WARNING oslo_vmware.rw_handles [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1917.230623] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1917.230623] env[62507]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1917.230623] env[62507]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 1917.230623] env[62507]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1917.230623] env[62507]: ERROR oslo_vmware.rw_handles     response.begin()
[ 1917.230623] env[62507]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1917.230623] env[62507]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 1917.230623] env[62507]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1917.230623] env[62507]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 1917.230623] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1917.230623] env[62507]: ERROR oslo_vmware.rw_handles
[ 1917.231393] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/2d93dedf-225b-43e6-8ab5-bcbc6dc32a4f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1917.234175] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1917.234416] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Copying Virtual Disk [datastore2] vmware_temp/2d93dedf-225b-43e6-8ab5-bcbc6dc32a4f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/2d93dedf-225b-43e6-8ab5-bcbc6dc32a4f/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1917.234723] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d80adcba-6718-4a82-81dd-4eb7be5267d6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1917.242487] env[62507]: DEBUG oslo_vmware.api [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for the task: (returnval){
[ 1917.242487] env[62507]:     value = "task-2460108"
[ 1917.242487] env[62507]:     _type = "Task"
[ 1917.242487] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1917.250018] env[62507]: DEBUG oslo_vmware.api [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': task-2460108, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1917.752858] env[62507]: DEBUG oslo_vmware.exceptions [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1917.753118] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1917.753685] env[62507]: ERROR nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1917.753685] env[62507]: Faults: ['InvalidArgument']
[ 1917.753685] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab] Traceback (most recent call last):
[ 1917.753685] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1917.753685] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     yield resources
[ 1917.753685] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1917.753685] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     self.driver.spawn(context, instance, image_meta,
[ 1917.753685] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1917.753685] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1917.753685] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1917.753685] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     self._fetch_image_if_missing(context, vi)
[ 1917.753685] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1917.754135] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     image_cache(vi, tmp_image_ds_loc)
[ 1917.754135] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1917.754135] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     vm_util.copy_virtual_disk(
[ 1917.754135] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1917.754135] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     session._wait_for_task(vmdk_copy_task)
[ 1917.754135] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1917.754135] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     return self.wait_for_task(task_ref)
[ 1917.754135] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1917.754135] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     return evt.wait()
[ 1917.754135] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1917.754135] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     result = hub.switch()
[ 1917.754135] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1917.754135] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     return self.greenlet.switch()
[ 1917.754591] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1917.754591] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     self.f(*self.args, **self.kw)
[ 1917.754591] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1917.754591] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     raise exceptions.translate_fault(task_info.error)
[ 1917.754591] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1917.754591] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab] Faults: ['InvalidArgument']
[ 1917.754591] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]
[ 1917.754591] env[62507]: INFO nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Terminating instance
[ 1917.755636] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1917.755848] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1917.756108] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04cc9986-540f-4190-b131-97aedea4d9ca {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1917.758361] env[62507]: DEBUG nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1917.758558] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1917.759278] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b28e79c-afcb-4bfc-819e-b4a40c17f8fc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1917.765618] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1917.765818] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-875fa3da-aabb-4e37-a181-e475adbc13f6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1917.767814] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1917.767987] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1917.768897] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-933e0b4a-a28d-49c2-bd79-602ed1e137eb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1917.773955] env[62507]: DEBUG oslo_vmware.api [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for the task: (returnval){
[ 1917.773955] env[62507]:     value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e78280-ac45-df6e-94af-a17a16bd3c87"
[ 1917.773955] env[62507]:     _type = "Task"
[ 1917.773955] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1917.782098] env[62507]: DEBUG oslo_vmware.api [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e78280-ac45-df6e-94af-a17a16bd3c87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1917.829897] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1917.830140] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1917.830316] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Deleting the datastore file [datastore2] ef5633ea-273d-429f-9a02-326711b73bab {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1917.830568] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a34d2e89-5581-4562-8398-eeb78fa8b6a2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1917.836560] env[62507]: DEBUG oslo_vmware.api [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for the task: (returnval){
[ 1917.836560] env[62507]:     value = "task-2460110"
[ 1917.836560] env[62507]:     _type = "Task"
[ 1917.836560] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1917.843678] env[62507]: DEBUG oslo_vmware.api [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': task-2460110, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1918.284638] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1918.284979] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Creating directory with path [datastore2] vmware_temp/5fc16394-2bbc-4feb-afb4-387e3ee54670/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1918.285084] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18a8ad87-1a8f-4a2a-8514-b224aa306a0c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1918.295441] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Created directory with path [datastore2] vmware_temp/5fc16394-2bbc-4feb-afb4-387e3ee54670/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1918.295626] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Fetch image to [datastore2] vmware_temp/5fc16394-2bbc-4feb-afb4-387e3ee54670/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1918.295798] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/5fc16394-2bbc-4feb-afb4-387e3ee54670/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1918.296507] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e18149-5d2b-4b6a-9866-ac6633f3f1fc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1918.302735] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95d5109-ba41-40b3-becc-1bed950a5179 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1918.311261] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67473b5-8c48-4859-b236-6aa3e78ff058 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1918.344635] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9836c78d-60b6-43e7-bac8-779026b5397a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1918.351771] env[62507]: DEBUG oslo_vmware.api [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': task-2460110, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073124} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1918.353186] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1918.353375] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1918.353549] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1918.353725] env[62507]: INFO nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Took 0.60 seconds to destroy the instance on the hypervisor.
[ 1918.355487] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c77695c7-0559-4f8c-b1b7-5919860a7be4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1918.357311] env[62507]: DEBUG nova.compute.claims [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1918.357485] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1918.357694] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1918.380140] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1918.432912] env[62507]: DEBUG oslo_vmware.rw_handles [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5fc16394-2bbc-4feb-afb4-387e3ee54670/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1918.495178] env[62507]: DEBUG oslo_vmware.rw_handles [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1918.495442] env[62507]: DEBUG oslo_vmware.rw_handles [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5fc16394-2bbc-4feb-afb4-387e3ee54670/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1918.602024] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c640b8a4-54cd-45d1-89b9-e1d508674ad8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1918.609268] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f80734b-fc2a-4d4b-9370-5985d8f6d692 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1918.638501] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc2504b-4f29-4661-b48b-a61db597c545 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1918.645214] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ddb678-4c5d-43bb-8abc-25884015d1dc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1918.657730] env[62507]: DEBUG nova.compute.provider_tree [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1918.666444] env[62507]: DEBUG nova.scheduler.client.report [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1918.681754] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.324s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1918.682340] env[62507]: ERROR nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1918.682340] env[62507]: Faults: ['InvalidArgument']
[ 1918.682340] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab] Traceback (most recent call last):
[ 1918.682340] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1918.682340] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     self.driver.spawn(context, instance, image_meta,
[ 1918.682340] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1918.682340] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1918.682340] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1918.682340] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     self._fetch_image_if_missing(context, vi)
[ 1918.682340] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1918.682340] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     image_cache(vi, tmp_image_ds_loc)
[ 1918.682340] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1918.682719] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     vm_util.copy_virtual_disk(
[ 1918.682719] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1918.682719] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     session._wait_for_task(vmdk_copy_task)
[ 1918.682719] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1918.682719] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     return self.wait_for_task(task_ref)
[ 1918.682719] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1918.682719] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     return evt.wait()
[ 1918.682719] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1918.682719] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     result = hub.switch()
[ 1918.682719] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1918.682719] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     return self.greenlet.switch()
[ 1918.682719] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1918.682719] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     self.f(*self.args, **self.kw)
[ 1918.683094] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1918.683094] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]     raise exceptions.translate_fault(task_info.error)
[ 1918.683094] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1918.683094] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab] Faults: ['InvalidArgument']
[ 1918.683094] env[62507]: ERROR nova.compute.manager [instance: ef5633ea-273d-429f-9a02-326711b73bab]
[ 1918.683094] env[62507]: DEBUG nova.compute.utils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1918.684419] env[62507]: DEBUG nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Build of instance ef5633ea-273d-429f-9a02-326711b73bab was re-scheduled: A specified parameter was not correct: fileType
[ 1918.684419] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1918.684781] env[62507]: DEBUG nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1918.684954] env[62507]: DEBUG nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1918.685139] env[62507]: DEBUG nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1918.685306] env[62507]: DEBUG nova.network.neutron [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1918.945107] env[62507]: DEBUG nova.network.neutron [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1918.960019] env[62507]: INFO nova.compute.manager [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Took 0.27 seconds to deallocate network for instance.
[ 1919.743660] env[62507]: INFO nova.scheduler.client.report [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Deleted allocations for instance ef5633ea-273d-429f-9a02-326711b73bab
[ 1919.763227] env[62507]: DEBUG oslo_concurrency.lockutils [None req-0dd7dca0-6108-4cd4-b471-00d946e48f9b tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "ef5633ea-273d-429f-9a02-326711b73bab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.055s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1919.764306] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f9bea71b-d43d-406a-a2d4-e88cc9e1065c tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "ef5633ea-273d-429f-9a02-326711b73bab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.731s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1919.764531] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f9bea71b-d43d-406a-a2d4-e88cc9e1065c tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "ef5633ea-273d-429f-9a02-326711b73bab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1919.764736] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f9bea71b-d43d-406a-a2d4-e88cc9e1065c tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "ef5633ea-273d-429f-9a02-326711b73bab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1919.764910] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f9bea71b-d43d-406a-a2d4-e88cc9e1065c tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "ef5633ea-273d-429f-9a02-326711b73bab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1919.766805] env[62507]: INFO nova.compute.manager [None req-f9bea71b-d43d-406a-a2d4-e88cc9e1065c tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Terminating instance
[ 1919.768482] env[62507]: DEBUG nova.compute.manager [None req-f9bea71b-d43d-406a-a2d4-e88cc9e1065c tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1919.768690] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bea71b-d43d-406a-a2d4-e88cc9e1065c tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1919.769198] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e83cd435-8a59-490a-9361-18057c980c0f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1919.774915] env[62507]: DEBUG nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1919.781014] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9f6104-e914-4353-bdff-c100b2fa970e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1919.809064] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-f9bea71b-d43d-406a-a2d4-e88cc9e1065c tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ef5633ea-273d-429f-9a02-326711b73bab could not be found.
[ 1919.809289] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bea71b-d43d-406a-a2d4-e88cc9e1065c tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1919.809464] env[62507]: INFO nova.compute.manager [None req-f9bea71b-d43d-406a-a2d4-e88cc9e1065c tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1919.809705] env[62507]: DEBUG oslo.service.loopingcall [None req-f9bea71b-d43d-406a-a2d4-e88cc9e1065c tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1919.811895] env[62507]: DEBUG nova.compute.manager [-] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1919.812021] env[62507]: DEBUG nova.network.neutron [-] [instance: ef5633ea-273d-429f-9a02-326711b73bab] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1919.825344] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1919.825576] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1919.826996] env[62507]: INFO nova.compute.claims [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1919.838971] env[62507]: DEBUG nova.network.neutron [-] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1919.858478] env[62507]: INFO nova.compute.manager [-] [instance: ef5633ea-273d-429f-9a02-326711b73bab] Took 0.05 seconds to deallocate network for instance.
[ 1919.942863] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f9bea71b-d43d-406a-a2d4-e88cc9e1065c tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "ef5633ea-273d-429f-9a02-326711b73bab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.178s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1919.943843] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "ef5633ea-273d-429f-9a02-326711b73bab" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 183.582s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1919.944065] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: ef5633ea-273d-429f-9a02-326711b73bab] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1919.944254] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "ef5633ea-273d-429f-9a02-326711b73bab" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1920.018543] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69107a07-6bc8-4c6a-9b94-af28f9f6e427 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1920.027153] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e293528-6d0c-4466-8642-a069e3084622 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1920.055419] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03346e83-de76-4b80-ae19-39f625e3d8cf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1920.061932] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cb948b-9e00-46e0-9290-974445497f7b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1920.075036] env[62507]: DEBUG nova.compute.provider_tree [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1920.083582] env[62507]: DEBUG nova.scheduler.client.report [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1920.097699] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.272s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1920.098169] env[62507]: DEBUG nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1920.130525] env[62507]: DEBUG nova.compute.utils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1920.132051] env[62507]: DEBUG nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1920.132330] env[62507]: DEBUG nova.network.neutron [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1920.143803] env[62507]: DEBUG nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1920.194491] env[62507]: DEBUG nova.policy [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df0b12531a3e46e4a97a8d4082d6868e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13cb14d09e6f4d84996e4470f4e24eeb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1920.203772] env[62507]: DEBUG nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1920.231210] env[62507]: DEBUG nova.virt.hardware [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1920.231475] env[62507]: DEBUG nova.virt.hardware [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1920.231633] env[62507]: DEBUG nova.virt.hardware [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1920.231814] env[62507]: DEBUG nova.virt.hardware [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1920.231990] env[62507]: DEBUG nova.virt.hardware [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1920.232194] env[62507]: DEBUG nova.virt.hardware [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1920.232417] env[62507]: DEBUG nova.virt.hardware [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1920.232599] env[62507]: DEBUG nova.virt.hardware [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1920.232818] env[62507]: DEBUG nova.virt.hardware [None
req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1920.233192] env[62507]: DEBUG nova.virt.hardware [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1920.233192] env[62507]: DEBUG nova.virt.hardware [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1920.234117] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e88704-c335-43e9-ba9d-be4446bcc397 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.242570] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60ed266-86c4-4a01-a328-3a42ae7383d5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.556893] env[62507]: DEBUG nova.network.neutron [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Successfully created port: 5d1e2ada-8d7a-46bf-a6d9-90050956c19a {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1921.053589] env[62507]: DEBUG nova.compute.manager [req-eb7d5c82-3c62-4817-bead-bab516b872fa req-a0b4a437-3ee9-4731-bbed-0e791dea738a service nova] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Received event network-vif-plugged-5d1e2ada-8d7a-46bf-a6d9-90050956c19a {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1921.053838] env[62507]: DEBUG oslo_concurrency.lockutils [req-eb7d5c82-3c62-4817-bead-bab516b872fa req-a0b4a437-3ee9-4731-bbed-0e791dea738a service nova] Acquiring lock "56c176ec-c6e5-4f48-a5be-badef25c5667-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.054018] env[62507]: DEBUG oslo_concurrency.lockutils [req-eb7d5c82-3c62-4817-bead-bab516b872fa req-a0b4a437-3ee9-4731-bbed-0e791dea738a service nova] Lock "56c176ec-c6e5-4f48-a5be-badef25c5667-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.054200] env[62507]: DEBUG oslo_concurrency.lockutils [req-eb7d5c82-3c62-4817-bead-bab516b872fa req-a0b4a437-3ee9-4731-bbed-0e791dea738a service nova] Lock "56c176ec-c6e5-4f48-a5be-badef25c5667-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.054365] env[62507]: DEBUG nova.compute.manager
[req-eb7d5c82-3c62-4817-bead-bab516b872fa req-a0b4a437-3ee9-4731-bbed-0e791dea738a service nova] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] No waiting events found dispatching network-vif-plugged-5d1e2ada-8d7a-46bf-a6d9-90050956c19a {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1921.054526] env[62507]: WARNING nova.compute.manager [req-eb7d5c82-3c62-4817-bead-bab516b872fa req-a0b4a437-3ee9-4731-bbed-0e791dea738a service nova] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Received unexpected event network-vif-plugged-5d1e2ada-8d7a-46bf-a6d9-90050956c19a for instance with vm_state building and task_state spawning. [ 1921.135034] env[62507]: DEBUG nova.network.neutron [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Successfully updated port: 5d1e2ada-8d7a-46bf-a6d9-90050956c19a {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1921.146045] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "refresh_cache-56c176ec-c6e5-4f48-a5be-badef25c5667" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.146267] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired lock "refresh_cache-56c176ec-c6e5-4f48-a5be-badef25c5667" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.146431] env[62507]: DEBUG nova.network.neutron [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1921.188517] env[62507]: DEBUG nova.network.neutron [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1921.358638] env[62507]: DEBUG nova.network.neutron [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Updating instance_info_cache with network_info: [{"id": "5d1e2ada-8d7a-46bf-a6d9-90050956c19a", "address": "fa:16:3e:dd:96:6f", "network": {"id": "0b8e342b-a3cc-46ec-b49a-d67e3624a14b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1312490043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cb14d09e6f4d84996e4470f4e24eeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d1e2ada-8d", "ovs_interfaceid": "5d1e2ada-8d7a-46bf-a6d9-90050956c19a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1921.371648] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Releasing lock "refresh_cache-56c176ec-c6e5-4f48-a5be-badef25c5667" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.371648] env[62507]: DEBUG nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Instance network_info: |[{"id": "5d1e2ada-8d7a-46bf-a6d9-90050956c19a", "address": "fa:16:3e:dd:96:6f", "network": {"id": "0b8e342b-a3cc-46ec-b49a-d67e3624a14b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1312490043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cb14d09e6f4d84996e4470f4e24eeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d1e2ada-8d", "ovs_interfaceid": "5d1e2ada-8d7a-46bf-a6d9-90050956c19a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1921.371916] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:96:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ef746c57-cd18-4883-a0e9-c52937aaf41d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d1e2ada-8d7a-46bf-a6d9-90050956c19a', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1921.379321] env[62507]: DEBUG oslo.service.loopingcall [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1921.379794] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1921.380075] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-796eb101-ac48-4fa4-9f5e-5f7fd679ef0b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.400474] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1921.400474] env[62507]: value = "task-2460111" [ 1921.400474] env[62507]: _type = "Task" [ 1921.400474] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.408410] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460111, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.911625] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460111, 'name': CreateVM_Task, 'duration_secs': 0.280285} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.911754] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1921.912529] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.912698] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.913063] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1921.913408] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b369be01-6668-4fa9-b3ea-73179c22455e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.917536] env[62507]: DEBUG oslo_vmware.api [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for the task: (returnval){ [ 1921.917536] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f13aff-307b-cd7d-a504-3c7a64334cf6" [ 1921.917536] env[62507]: _type = "Task" [ 1921.917536] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.924840] env[62507]: DEBUG oslo_vmware.api [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f13aff-307b-cd7d-a504-3c7a64334cf6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.429238] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.429564] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1922.429862] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1923.082633] env[62507]: DEBUG nova.compute.manager [req-c6d4443a-315e-4e11-9375-e2c210c5ecc0 req-0ced8650-b06e-4e5c-8f8a-88294f4c797a service nova] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Received event network-changed-5d1e2ada-8d7a-46bf-a6d9-90050956c19a {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1923.082866] env[62507]: DEBUG nova.compute.manager [req-c6d4443a-315e-4e11-9375-e2c210c5ecc0 req-0ced8650-b06e-4e5c-8f8a-88294f4c797a service nova] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Refreshing instance network info cache due to event network-changed-5d1e2ada-8d7a-46bf-a6d9-90050956c19a. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1923.083106] env[62507]: DEBUG oslo_concurrency.lockutils [req-c6d4443a-315e-4e11-9375-e2c210c5ecc0 req-0ced8650-b06e-4e5c-8f8a-88294f4c797a service nova] Acquiring lock "refresh_cache-56c176ec-c6e5-4f48-a5be-badef25c5667" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1923.083259] env[62507]: DEBUG oslo_concurrency.lockutils [req-c6d4443a-315e-4e11-9375-e2c210c5ecc0 req-0ced8650-b06e-4e5c-8f8a-88294f4c797a service nova] Acquired lock "refresh_cache-56c176ec-c6e5-4f48-a5be-badef25c5667" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1923.083423] env[62507]: DEBUG nova.network.neutron [req-c6d4443a-315e-4e11-9375-e2c210c5ecc0 req-0ced8650-b06e-4e5c-8f8a-88294f4c797a service nova] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Refreshing network info cache for port 5d1e2ada-8d7a-46bf-a6d9-90050956c19a {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1923.326514] env[62507]: DEBUG nova.network.neutron [req-c6d4443a-315e-4e11-9375-e2c210c5ecc0 req-0ced8650-b06e-4e5c-8f8a-88294f4c797a service nova] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Updated VIF entry in instance network info cache for port 5d1e2ada-8d7a-46bf-a6d9-90050956c19a. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1923.326894] env[62507]: DEBUG nova.network.neutron [req-c6d4443a-315e-4e11-9375-e2c210c5ecc0 req-0ced8650-b06e-4e5c-8f8a-88294f4c797a service nova] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Updating instance_info_cache with network_info: [{"id": "5d1e2ada-8d7a-46bf-a6d9-90050956c19a", "address": "fa:16:3e:dd:96:6f", "network": {"id": "0b8e342b-a3cc-46ec-b49a-d67e3624a14b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1312490043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cb14d09e6f4d84996e4470f4e24eeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d1e2ada-8d", "ovs_interfaceid": "5d1e2ada-8d7a-46bf-a6d9-90050956c19a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1923.335954] env[62507]: DEBUG oslo_concurrency.lockutils [req-c6d4443a-315e-4e11-9375-e2c210c5ecc0 req-0ced8650-b06e-4e5c-8f8a-88294f4c797a service nova] Releasing lock "refresh_cache-56c176ec-c6e5-4f48-a5be-badef25c5667" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1948.420389] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ee1b77f-d2e9-451a-a572-c0813a5edeac tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "56c176ec-c6e5-4f48-a5be-badef25c5667" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.567553] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1958.163016] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1958.170497] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1958.170691] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1958.170826] env[62507]: DEBUG
nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1958.192647] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1958.192866] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1958.193028] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1958.193159] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1958.193286] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1958.193407] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1958.193528] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1958.193647] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1958.193762] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a76d0987-29c2-423b-972c-990639986d5f] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1958.193876] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1958.193994] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1959.167606] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.191553] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.191553] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.191553] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1959.668380] env[62507]: DEBUG oslo_concurrency.lockutils [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Acquiring lock "479a1e78-23c0-4a96-aa72-aa419c8c251b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.668652] env[62507]: DEBUG oslo_concurrency.lockutils [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Lock "479a1e78-23c0-4a96-aa72-aa419c8c251b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1961.168248] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1961.168504] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1962.167372] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1962.167631] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1962.178906] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.179179] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.179278] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.179437] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1962.180646] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6448792-c752-485e-8cdd-62dcb53ac3b9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.189475] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b3378d-309e-4ee5-b3f8-b86320f89200 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.202768] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b91b7e9-270a-43b6-b4bb-b197219d2472 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.208600] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e090d495-191e-442b-966d-3f4e7d3ef7d5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.237722] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181161MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1962.237871] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.238076] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.309783] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7ff089f8-f304-4c2e-bf3d-16997fe8968c actively managed on this 
compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1962.309955] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 637de77e-d142-45ca-8a4e-3bf365e31502 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1962.310101] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1962.310254] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f257db53-3c5f-4dfc-bd45-9f2b27b49401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1962.310384] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1962.310506] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1c56d4af-ba43-4141-86d6-880ff384041e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1962.310622] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 39471434-14af-468a-8b55-5fc58957e7b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1962.310736] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 16295fdd-45d6-492f-99d9-1006ec42c097 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1962.310850] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a76d0987-29c2-423b-972c-990639986d5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1962.310962] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 56c176ec-c6e5-4f48-a5be-badef25c5667 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1962.321408] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 425b5171-97c2-4700-ad5f-c79aadb39eae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1962.331554] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 479a1e78-23c0-4a96-aa72-aa419c8c251b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1962.331762] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1962.331907] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1962.467360] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e2c01e-5c43-4582-8439-d94b939b327b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.474168] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b659ea56-2def-413a-90f1-50b6e1612df1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.504804] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca80591-01bd-46e5-a8ca-b81bcf0d5eaa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.512113] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae87fd6-4ed6-4e53-854b-519430f899df {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.524942] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1962.532948] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1962.548844] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1962.549040] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.311s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.165983] env[62507]: WARNING oslo_vmware.rw_handles [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1967.165983] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1967.165983] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1967.165983] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1967.165983] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1967.165983] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 1967.165983] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1967.165983] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1967.165983] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1967.165983] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1967.165983] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1967.165983] env[62507]: ERROR oslo_vmware.rw_handles [ 1967.166575] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/5fc16394-2bbc-4feb-afb4-387e3ee54670/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1967.168707] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fa807356-0696-4c02-b7dd-698b61856d77 
tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1967.168946] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Copying Virtual Disk [datastore2] vmware_temp/5fc16394-2bbc-4feb-afb4-387e3ee54670/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/5fc16394-2bbc-4feb-afb4-387e3ee54670/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1967.169266] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e07e3885-c8bc-415c-a742-123b72559659 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.176904] env[62507]: DEBUG oslo_vmware.api [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for the task: (returnval){ [ 1967.176904] env[62507]: value = "task-2460112" [ 1967.176904] env[62507]: _type = "Task" [ 1967.176904] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.184413] env[62507]: DEBUG oslo_vmware.api [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': task-2460112, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.687486] env[62507]: DEBUG oslo_vmware.exceptions [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1967.687805] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1967.688208] env[62507]: ERROR nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1967.688208] env[62507]: Faults: ['InvalidArgument'] [ 1967.688208] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Traceback (most recent call last): [ 1967.688208] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1967.688208] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] yield resources [ 1967.688208] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1967.688208] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] self.driver.spawn(context, instance, image_meta, [ 1967.688208] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1967.688208] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1967.688208] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1967.688208] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] self._fetch_image_if_missing(context, vi) [ 1967.688208] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1967.688534] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] image_cache(vi, tmp_image_ds_loc) [ 1967.688534] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1967.688534] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] vm_util.copy_virtual_disk( [ 1967.688534] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1967.688534] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] session._wait_for_task(vmdk_copy_task) [ 1967.688534] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1967.688534] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] return self.wait_for_task(task_ref) [ 1967.688534] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1967.688534] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] return evt.wait() [ 1967.688534] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1967.688534] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] result = hub.switch() [ 1967.688534] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1967.688534] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] return self.greenlet.switch() [ 1967.688916] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1967.688916] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] self.f(*self.args, **self.kw) [ 1967.688916] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1967.688916] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] raise exceptions.translate_fault(task_info.error) [ 1967.688916] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1967.688916] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Faults: ['InvalidArgument'] [ 1967.688916] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] [ 1967.688916] env[62507]: INFO nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Terminating instance [ 1967.690131] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1967.690349] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1967.690590] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f4c37bc-516c-4191-a5b3-5c5b113c8c59 
{{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.692713] env[62507]: DEBUG nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1967.692902] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1967.693682] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e8010c-20c4-4ee6-be4d-2bd7fb9dfb56 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.700060] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1967.700264] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-298f2a3a-328f-4b09-8a4d-e64ec848e3c6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.702306] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1967.702482] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1967.703390] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc46abd7-0a12-494a-bb98-30cb312fc790 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.708093] env[62507]: DEBUG oslo_vmware.api [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Waiting for the task: (returnval){ [ 1967.708093] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529ba86b-fd18-2595-b0a7-33a4f285937e" [ 1967.708093] env[62507]: _type = "Task" [ 1967.708093] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.718631] env[62507]: DEBUG oslo_vmware.api [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529ba86b-fd18-2595-b0a7-33a4f285937e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.773538] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1967.773758] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1967.773942] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Deleting the datastore file [datastore2] 7ff089f8-f304-4c2e-bf3d-16997fe8968c {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1967.774209] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8dc1ad31-4e08-4862-a212-784c39d7cba3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.780041] env[62507]: DEBUG oslo_vmware.api [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for the task: (returnval){ [ 1967.780041] env[62507]: value = "task-2460114" [ 1967.780041] env[62507]: _type = "Task" [ 1967.780041] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.787656] env[62507]: DEBUG oslo_vmware.api [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': task-2460114, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.218830] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1968.219170] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Creating directory with path [datastore2] vmware_temp/2b6dc215-7a1a-4a9e-997e-5566c52ba0f4/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1968.219402] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f130d00c-a04e-468f-907c-29222d94ef64 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.232182] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Created directory with path [datastore2] vmware_temp/2b6dc215-7a1a-4a9e-997e-5566c52ba0f4/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1968.232417] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Fetch image to [datastore2] vmware_temp/2b6dc215-7a1a-4a9e-997e-5566c52ba0f4/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1968.232603] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/2b6dc215-7a1a-4a9e-997e-5566c52ba0f4/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1968.233377] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de7584b-ee15-471f-b9f7-0e4969db589f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.240104] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a67578a-8f8b-47a4-a7fd-794f17d3d148 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.248860] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab53771f-67ee-4f7a-953e-9be1db73dad9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.280202] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cdb11ce9-1253-496d-9329-d423e9cb1061 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.291474] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4a67a0a4-b874-42b7-98d1-89037462a5c7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.293124] env[62507]: DEBUG oslo_vmware.api [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': task-2460114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072035} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.293557] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1968.293746] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1968.293920] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1968.294113] env[62507]: INFO nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1968.296230] env[62507]: DEBUG nova.compute.claims [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1968.296433] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.296651] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.315209] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1968.368522] env[62507]: DEBUG oslo_vmware.rw_handles [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2b6dc215-7a1a-4a9e-997e-5566c52ba0f4/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1968.429643] env[62507]: DEBUG oslo_vmware.rw_handles [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1968.429834] env[62507]: DEBUG oslo_vmware.rw_handles [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2b6dc215-7a1a-4a9e-997e-5566c52ba0f4/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1968.543045] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c2cb1d-bdd1-4c90-b9de-64613062f8cf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.550521] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca77adc0-72d8-46c7-bdec-d30bedd8187a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.580810] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f6f4d4-3871-4365-8678-f123aa13a4b5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.587742] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e976c5f8-285d-4da0-aaa8-c388b00ca89f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.600288] env[62507]: DEBUG nova.compute.provider_tree [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1968.608464] env[62507]: DEBUG nova.scheduler.client.report [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1968.625227] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.328s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.625754] env[62507]: ERROR nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1968.625754] env[62507]: Faults: ['InvalidArgument'] [ 1968.625754] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Traceback (most recent call last): [ 1968.625754] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1968.625754] env[62507]: ERROR 
nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] self.driver.spawn(context, instance, image_meta, [ 1968.625754] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1968.625754] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1968.625754] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1968.625754] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] self._fetch_image_if_missing(context, vi) [ 1968.625754] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1968.625754] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] image_cache(vi, tmp_image_ds_loc) [ 1968.625754] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1968.626080] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] vm_util.copy_virtual_disk( [ 1968.626080] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1968.626080] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] session._wait_for_task(vmdk_copy_task) [ 1968.626080] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1968.626080] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] return self.wait_for_task(task_ref) [ 1968.626080] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1968.626080] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] return evt.wait() [ 1968.626080] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1968.626080] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] result = hub.switch() [ 1968.626080] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1968.626080] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] return self.greenlet.switch() [ 1968.626080] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1968.626080] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] self.f(*self.args, **self.kw) [ 1968.626385] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1968.626385] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] raise exceptions.translate_fault(task_info.error) [ 1968.626385] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1968.626385] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Faults: ['InvalidArgument'] [ 1968.626385] env[62507]: ERROR nova.compute.manager [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] [ 1968.626508] env[62507]: DEBUG nova.compute.utils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1968.627880] env[62507]: DEBUG nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Build of instance 7ff089f8-f304-4c2e-bf3d-16997fe8968c was re-scheduled: A specified parameter was not correct: fileType [ 1968.627880] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1968.628266] env[62507]: DEBUG nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1968.628442] env[62507]: DEBUG nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1968.628619] env[62507]: DEBUG nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1968.628782] env[62507]: DEBUG nova.network.neutron [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1968.883232] env[62507]: DEBUG nova.network.neutron [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.895379] env[62507]: INFO nova.compute.manager [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Took 0.27 seconds to deallocate network for instance. [ 1968.985417] env[62507]: INFO nova.scheduler.client.report [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Deleted allocations for instance 7ff089f8-f304-4c2e-bf3d-16997fe8968c [ 1969.007351] env[62507]: DEBUG oslo_concurrency.lockutils [None req-fa807356-0696-4c02-b7dd-698b61856d77 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 610.470s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.008522] env[62507]: DEBUG oslo_concurrency.lockutils [None req-10039c40-2b9c-4852-aeb4-44a69990a8c3 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 415.060s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.008746] env[62507]: DEBUG oslo_concurrency.lockutils [None req-10039c40-2b9c-4852-aeb4-44a69990a8c3 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.008952] env[62507]: DEBUG oslo_concurrency.lockutils [None req-10039c40-2b9c-4852-aeb4-44a69990a8c3 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.009136] env[62507]: DEBUG oslo_concurrency.lockutils [None req-10039c40-2b9c-4852-aeb4-44a69990a8c3 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.011096] env[62507]: INFO nova.compute.manager [None req-10039c40-2b9c-4852-aeb4-44a69990a8c3 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Terminating instance [ 1969.012806] env[62507]: DEBUG nova.compute.manager [None req-10039c40-2b9c-4852-aeb4-44a69990a8c3 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1969.013008] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-10039c40-2b9c-4852-aeb4-44a69990a8c3 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1969.013561] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a74c1fe6-bc5c-4709-b4f3-b380bfee4acc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.018520] env[62507]: DEBUG nova.compute.manager [None req-7e95a454-32d3-44f4-a7be-9cfef5324496 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: c5d66c7b-54e4-4a5e-8207-0cadce10c4df] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1969.025056] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6dc419-907e-46c9-83ed-dfb5a32273c3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.041260] env[62507]: DEBUG nova.compute.manager [None req-7e95a454-32d3-44f4-a7be-9cfef5324496 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: c5d66c7b-54e4-4a5e-8207-0cadce10c4df] Instance disappeared before build. {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1969.052615] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-10039c40-2b9c-4852-aeb4-44a69990a8c3 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7ff089f8-f304-4c2e-bf3d-16997fe8968c could not be found. 
[ 1969.052801] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-10039c40-2b9c-4852-aeb4-44a69990a8c3 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1969.052976] env[62507]: INFO nova.compute.manager [None req-10039c40-2b9c-4852-aeb4-44a69990a8c3 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1969.053247] env[62507]: DEBUG oslo.service.loopingcall [None req-10039c40-2b9c-4852-aeb4-44a69990a8c3 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1969.053467] env[62507]: DEBUG nova.compute.manager [-] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1969.053560] env[62507]: DEBUG nova.network.neutron [-] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1969.068332] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7e95a454-32d3-44f4-a7be-9cfef5324496 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "c5d66c7b-54e4-4a5e-8207-0cadce10c4df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.831s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.077781] env[62507]: DEBUG nova.network.neutron [-] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1969.078990] env[62507]: DEBUG nova.compute.manager [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1969.085517] env[62507]: INFO nova.compute.manager [-] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] Took 0.03 seconds to deallocate network for instance. 
[ 1969.130704] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.130959] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.132448] env[62507]: INFO nova.compute.claims [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1969.180255] env[62507]: DEBUG oslo_concurrency.lockutils [None req-10039c40-2b9c-4852-aeb4-44a69990a8c3 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.172s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.181667] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 232.820s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.181865] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7ff089f8-f304-4c2e-bf3d-16997fe8968c] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1969.182102] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "7ff089f8-f304-4c2e-bf3d-16997fe8968c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.307647] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a626d6-8988-4076-8a74-02cd3b85e645 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.314714] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc49ea7-9772-4f9e-9d18-313f0b4b9b50 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.344362] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e726b1a-a3a1-4ff0-b5d2-e46698ce1482 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.351383] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7589c355-a3fb-48a6-a8aa-6b9836c966f8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.365018] env[62507]: DEBUG nova.compute.provider_tree [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1969.373520] env[62507]: DEBUG nova.scheduler.client.report [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1969.389615] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.259s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.390121] env[62507]: DEBUG nova.compute.manager [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Start building networks asynchronously for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1969.423541] env[62507]: DEBUG nova.compute.utils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1969.425028] env[62507]: DEBUG nova.compute.manager [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1969.425560] env[62507]: DEBUG nova.network.neutron [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1969.435272] env[62507]: DEBUG nova.compute.manager [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1969.489719] env[62507]: DEBUG nova.policy [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a05f77d144740b0a37ca55fe163a511', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '24f3f26978fa490fa5fe8dcd8573c61e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 1969.505182] env[62507]: DEBUG nova.compute.manager [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1969.531108] env[62507]: DEBUG nova.virt.hardware [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1969.531352] env[62507]: DEBUG nova.virt.hardware [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1969.531511] env[62507]: DEBUG nova.virt.hardware [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1969.531834] env[62507]: DEBUG nova.virt.hardware [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1969.531834] env[62507]: DEBUG nova.virt.hardware [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1969.531972] env[62507]: DEBUG nova.virt.hardware [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1969.532209] env[62507]: DEBUG nova.virt.hardware [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1969.532371] env[62507]: DEBUG nova.virt.hardware [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1969.532543] env[62507]: DEBUG nova.virt.hardware [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 
tempest-ImagesTestJSON-1935977095-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1969.532706] env[62507]: DEBUG nova.virt.hardware [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1969.532964] env[62507]: DEBUG nova.virt.hardware [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1969.533747] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a41ba1-7476-4f33-860d-24e83264cff4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.541312] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b27f6ce-8afb-4983-8298-a1f85813f4ae {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.829536] env[62507]: DEBUG nova.network.neutron [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Successfully created port: d9d7fbb8-6b13-4071-9abf-894da00ab4ff {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1970.411021] env[62507]: DEBUG nova.network.neutron [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Successfully updated port: d9d7fbb8-6b13-4071-9abf-894da00ab4ff {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1970.421575] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "refresh_cache-425b5171-97c2-4700-ad5f-c79aadb39eae" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1970.421882] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquired lock "refresh_cache-425b5171-97c2-4700-ad5f-c79aadb39eae" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1970.422165] env[62507]: DEBUG nova.network.neutron [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1970.467120] env[62507]: DEBUG nova.network.neutron [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1970.658841] env[62507]: DEBUG nova.network.neutron [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Updating instance_info_cache with network_info: [{"id": "d9d7fbb8-6b13-4071-9abf-894da00ab4ff", "address": "fa:16:3e:66:c1:ec", "network": {"id": "cd916907-36a3-476b-86b5-4b5b489d0b84", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2065590662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24f3f26978fa490fa5fe8dcd8573c61e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9d7fbb8-6b", "ovs_interfaceid": "d9d7fbb8-6b13-4071-9abf-894da00ab4ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.673320] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Releasing lock "refresh_cache-425b5171-97c2-4700-ad5f-c79aadb39eae" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.673606] env[62507]: DEBUG nova.compute.manager [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Instance network_info: |[{"id": "d9d7fbb8-6b13-4071-9abf-894da00ab4ff", "address": "fa:16:3e:66:c1:ec", "network": {"id": "cd916907-36a3-476b-86b5-4b5b489d0b84", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2065590662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24f3f26978fa490fa5fe8dcd8573c61e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9d7fbb8-6b", "ovs_interfaceid": "d9d7fbb8-6b13-4071-9abf-894da00ab4ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1970.674039] env[62507]: DEBUG 
nova.virt.vmwareapi.vmops [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:c1:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d31a554-a94c-4471-892f-f65aa87b8279', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9d7fbb8-6b13-4071-9abf-894da00ab4ff', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1970.681892] env[62507]: DEBUG oslo.service.loopingcall [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1970.682406] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1970.682637] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3afb3597-553c-4a1f-b6da-9d3875fcd39f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.702756] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1970.702756] env[62507]: value = "task-2460115" [ 1970.702756] env[62507]: _type = "Task" [ 1970.702756] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.710152] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460115, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.919786] env[62507]: DEBUG nova.compute.manager [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Received event network-vif-plugged-d9d7fbb8-6b13-4071-9abf-894da00ab4ff {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1970.919786] env[62507]: DEBUG oslo_concurrency.lockutils [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] Acquiring lock "425b5171-97c2-4700-ad5f-c79aadb39eae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1970.919885] env[62507]: DEBUG oslo_concurrency.lockutils [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] Lock "425b5171-97c2-4700-ad5f-c79aadb39eae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1970.920018] env[62507]: DEBUG oslo_concurrency.lockutils [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] Lock "425b5171-97c2-4700-ad5f-c79aadb39eae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1970.920222] env[62507]: DEBUG nova.compute.manager [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] No waiting events found dispatching network-vif-plugged-d9d7fbb8-6b13-4071-9abf-894da00ab4ff {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1970.920411] env[62507]: WARNING nova.compute.manager [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Received unexpected event network-vif-plugged-d9d7fbb8-6b13-4071-9abf-894da00ab4ff for instance with vm_state building and task_state spawning. [ 1970.920581] env[62507]: DEBUG nova.compute.manager [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Received event network-changed-d9d7fbb8-6b13-4071-9abf-894da00ab4ff {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1970.920738] env[62507]: DEBUG nova.compute.manager [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Refreshing instance network info cache due to event network-changed-d9d7fbb8-6b13-4071-9abf-894da00ab4ff. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1970.920922] env[62507]: DEBUG oslo_concurrency.lockutils [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] Acquiring lock "refresh_cache-425b5171-97c2-4700-ad5f-c79aadb39eae" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1970.921073] env[62507]: DEBUG oslo_concurrency.lockutils [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] Acquired lock "refresh_cache-425b5171-97c2-4700-ad5f-c79aadb39eae" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1970.921238] env[62507]: DEBUG nova.network.neutron [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Refreshing network info cache for port d9d7fbb8-6b13-4071-9abf-894da00ab4ff {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1971.175290] env[62507]: DEBUG nova.network.neutron [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Updated VIF entry in instance network info cache for port d9d7fbb8-6b13-4071-9abf-894da00ab4ff. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1971.175629] env[62507]: DEBUG nova.network.neutron [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Updating instance_info_cache with network_info: [{"id": "d9d7fbb8-6b13-4071-9abf-894da00ab4ff", "address": "fa:16:3e:66:c1:ec", "network": {"id": "cd916907-36a3-476b-86b5-4b5b489d0b84", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2065590662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24f3f26978fa490fa5fe8dcd8573c61e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9d7fbb8-6b", "ovs_interfaceid": "d9d7fbb8-6b13-4071-9abf-894da00ab4ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1971.185484] env[62507]: DEBUG oslo_concurrency.lockutils [req-8651e8e5-8051-4b76-abb1-82f22d9e5bc0 req-e68d10a7-170d-4619-814d-d98edf1cc806 service nova] Releasing lock "refresh_cache-425b5171-97c2-4700-ad5f-c79aadb39eae" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1971.212903] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460115, 'name': CreateVM_Task, 'duration_secs': 0.312667} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1971.213076] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1971.213718] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1971.213881] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1971.214213] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1971.214465] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7984e327-5af2-472b-83d0-007027fb51d8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.218851] env[62507]: DEBUG oslo_vmware.api [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for the task: (returnval){ [ 1971.218851] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]521564d6-25d1-ed42-fdd9-0d8b5940f860" [ 1971.218851] env[62507]: _type = "Task" [ 1971.218851] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.230045] env[62507]: DEBUG oslo_vmware.api [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]521564d6-25d1-ed42-fdd9-0d8b5940f860, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.729449] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1971.729739] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1971.729909] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.318342] env[62507]: DEBUG oslo_concurrency.lockutils [None req-d86096c4-6ab9-424e-a53d-48c042cd0e29 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "425b5171-97c2-4700-ad5f-c79aadb39eae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.713152] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "7adef000-4700-4c2f-a7ea-09baf40cedf5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.713435] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "7adef000-4700-4c2f-a7ea-09baf40cedf5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2015.944778] env[62507]: WARNING oslo_vmware.rw_handles [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2015.944778] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2015.944778] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2015.944778] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2015.944778] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2015.944778] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 2015.944778] env[62507]: ERROR 
oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2015.944778] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2015.944778] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2015.944778] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2015.944778] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2015.944778] env[62507]: ERROR oslo_vmware.rw_handles [ 2015.945427] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/2b6dc215-7a1a-4a9e-997e-5566c52ba0f4/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2015.947145] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2015.947390] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Copying Virtual Disk [datastore2] vmware_temp/2b6dc215-7a1a-4a9e-997e-5566c52ba0f4/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/2b6dc215-7a1a-4a9e-997e-5566c52ba0f4/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2015.947676] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab901726-912e-4083-8fc4-4918a1eb7e1f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.955590] env[62507]: DEBUG oslo_vmware.api [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Waiting for the task: (returnval){ [ 2015.955590] env[62507]: value = "task-2460116" [ 2015.955590] env[62507]: _type = "Task" [ 2015.955590] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.963347] env[62507]: DEBUG oslo_vmware.api [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Task: {'id': task-2460116, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.467726] env[62507]: DEBUG oslo_vmware.exceptions [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2016.468221] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.468815] env[62507]: ERROR nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2016.468815] env[62507]: Faults: ['InvalidArgument'] [ 2016.468815] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Traceback (most recent call last): [ 2016.468815] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2016.468815] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] yield resources [ 2016.468815] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2016.468815] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] self.driver.spawn(context, instance, image_meta, [ 2016.468815] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2016.468815] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2016.468815] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2016.468815] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] self._fetch_image_if_missing(context, vi) [ 2016.468815] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2016.469165] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] image_cache(vi, tmp_image_ds_loc) [ 2016.469165] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2016.469165] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] vm_util.copy_virtual_disk( [ 2016.469165] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2016.469165] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] session._wait_for_task(vmdk_copy_task) [ 2016.469165] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2016.469165] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] return self.wait_for_task(task_ref) [ 2016.469165] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2016.469165] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] return evt.wait() [ 2016.469165] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2016.469165] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] result = hub.switch() [ 2016.469165] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2016.469165] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] return self.greenlet.switch() [ 2016.469656] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2016.469656] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] self.f(*self.args, **self.kw) [ 2016.469656] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2016.469656] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] raise exceptions.translate_fault(task_info.error) [ 2016.469656] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2016.469656] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Faults: ['InvalidArgument'] [ 2016.469656] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] [ 2016.469656] env[62507]: INFO nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Terminating instance [ 2016.470763] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.470974] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2016.471240] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b52fe7fe-4784-4be0-b3c6-0b3980bed3e3 {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.473691] env[62507]: DEBUG nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2016.473880] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2016.474618] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d367213-d466-4119-a5e0-119cc94e3e5b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.481395] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2016.481617] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cd84197-7a11-41e3-8295-745c4c7097f8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.483804] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2016.483979] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2016.484954] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38e6643e-d456-49e0-afc1-f248cfdc34fa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.489671] env[62507]: DEBUG oslo_vmware.api [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for the task: (returnval){ [ 2016.489671] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]528e8c59-f964-4ac7-528f-07c4257b881e" [ 2016.489671] env[62507]: _type = "Task" [ 2016.489671] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.497083] env[62507]: DEBUG oslo_vmware.api [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]528e8c59-f964-4ac7-528f-07c4257b881e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.556068] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2016.556264] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2016.556449] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Deleting the datastore file [datastore2] 637de77e-d142-45ca-8a4e-3bf365e31502 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2016.556823] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7cee22a3-ab27-4e64-b166-8f62687b384f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.562709] env[62507]: DEBUG oslo_vmware.api [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Waiting for the task: (returnval){ [ 2016.562709] env[62507]: value = "task-2460118" [ 2016.562709] env[62507]: _type = "Task" [ 2016.562709] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.570654] env[62507]: DEBUG oslo_vmware.api [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Task: {'id': task-2460118, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.000816] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2017.001150] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Creating directory with path [datastore2] vmware_temp/0ea780a8-a0e4-4ff3-881f-fd6d93a8435e/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2017.001276] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3969acbf-c13f-46d1-a7df-a85723b6d9b8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.011500] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Created directory with path [datastore2] vmware_temp/0ea780a8-a0e4-4ff3-881f-fd6d93a8435e/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2017.011685] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Fetch image to [datastore2] vmware_temp/0ea780a8-a0e4-4ff3-881f-fd6d93a8435e/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2017.011861] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/0ea780a8-a0e4-4ff3-881f-fd6d93a8435e/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2017.012593] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b5974a-f1ad-4765-935f-4eba984d1cc0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.018813] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad9a0a3-d66c-4247-baba-2bed70442d79 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.028469] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df33003-d3fd-4aab-9e5b-40c27b67d871 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.058332] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b592b8-e245-48fe-8f2a-c7455a0ab6cb {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.066151] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c4ba6c9a-4879-414c-beb5-cc0f595974f3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.072280] env[62507]: DEBUG oslo_vmware.api [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Task: {'id': task-2460118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.058776} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.072538] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2017.072718] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2017.072890] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2017.073092] env[62507]: INFO nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Took 0.60 seconds to destroy the instance on the hypervisor. 
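The failure path in the traceback above is worth pinning down: oslo_vmware's _poll_task (api.py:448) raises exceptions.translate_fault(task_info.error), so the CopyVirtualDisk_Task fault reaches Nova as a VimFaultException whose message is "A specified parameter was not correct: fileType" and whose fault list is ['InvalidArgument']. Below is a minimal sketch of catching that at a wait_for_task call site, assuming only what the traceback shows: "session" stands in for an established oslo_vmware.api.VMwareAPISession and "task_ref" for a task returned by an earlier method invocation; both are placeholders, not objects taken from this log.

    # Sketch under the assumptions stated above; only the exception type
    # and wait_for_task() are taken from the traceback in this log.
    from oslo_vmware import exceptions as vexc

    def wait_and_report(session, task_ref):
        try:
            return session.wait_for_task(task_ref)
        except vexc.VimFaultException as exc:
            # str(exc) renders as in the log (the vCenter message plus a
            # "Faults: [...]" line); the raw list rides on exc.fault_list.
            print('task failed: %s (faults: %s)' % (exc, exc.fault_list))
            raise

Nova's own handling, visible further down, is the same idea one level up: it logs the fault and re-schedules the build rather than retrying the disk copy in place.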
[ 2017.075151] env[62507]: DEBUG nova.compute.claims [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2017.075326] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.075540] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.088889] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2017.141823] env[62507]: DEBUG oslo_vmware.rw_handles [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ea780a8-a0e4-4ff3-881f-fd6d93a8435e/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2017.202298] env[62507]: DEBUG oslo_vmware.rw_handles [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2017.202493] env[62507]: DEBUG oslo_vmware.rw_handles [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ea780a8-a0e4-4ff3-881f-fd6d93a8435e/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2017.318719] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2193a968-2620-45d5-929b-d8b4bf997088 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.326387] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb53ffdf-3cb1-46e1-8928-5aa5250dee24 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.356400] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e81c70-a086-4ddd-b674-15c7c7ca4c54 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.362905] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b60343-b732-4136-9651-3df1d8324b04 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.375313] env[62507]: DEBUG nova.compute.provider_tree [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2017.385226] env[62507]: DEBUG nova.scheduler.client.report [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2017.399052] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.323s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.399574] env[62507]: ERROR nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2017.399574] env[62507]: Faults: ['InvalidArgument'] [ 2017.399574] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Traceback (most recent call last): [ 2017.399574] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2017.399574] env[62507]: 
ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] self.driver.spawn(context, instance, image_meta, [ 2017.399574] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2017.399574] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2017.399574] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2017.399574] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] self._fetch_image_if_missing(context, vi) [ 2017.399574] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2017.399574] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] image_cache(vi, tmp_image_ds_loc) [ 2017.399574] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2017.399885] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] vm_util.copy_virtual_disk( [ 2017.399885] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2017.399885] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] session._wait_for_task(vmdk_copy_task) [ 2017.399885] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2017.399885] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] return self.wait_for_task(task_ref) [ 2017.399885] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2017.399885] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] return evt.wait() [ 2017.399885] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2017.399885] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] result = hub.switch() [ 2017.399885] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2017.399885] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] return self.greenlet.switch() [ 2017.399885] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2017.399885] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] self.f(*self.args, **self.kw) [ 2017.400170] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2017.400170] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] raise exceptions.translate_fault(task_info.error) [ 2017.400170] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2017.400170] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Faults: ['InvalidArgument'] [ 2017.400170] env[62507]: ERROR nova.compute.manager [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] [ 2017.400284] env[62507]: DEBUG nova.compute.utils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2017.401588] env[62507]: DEBUG nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Build of instance 637de77e-d142-45ca-8a4e-3bf365e31502 was re-scheduled: A specified parameter was not correct: fileType [ 2017.401588] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2017.401960] env[62507]: DEBUG nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2017.402149] env[62507]: DEBUG nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2017.402326] env[62507]: DEBUG nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2017.402505] env[62507]: DEBUG nova.network.neutron [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2017.549474] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.758286] env[62507]: DEBUG nova.network.neutron [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2017.776160] env[62507]: INFO nova.compute.manager [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Took 0.37 seconds to deallocate network for instance. 
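Every record in this dump has the same shape, "[ <seconds>] env[<tag>]: <LEVEL> <logger> [<context>] <message>", so the records that matter here (the rw_handles RemoteDisconnected warning and the two fileType tracebacks) can be pulled out mechanically. A self-contained sketch using only the standard library; the sample record is copied from the traceback above:

    import re

    # Mirrors the visible layout: "[ 2016.468815] env[62507]: ERROR nova..."
    RECORD = re.compile(
        r'\[\s*(?P<ts>\d+\.\d+)\] env\[\d+\]: '
        r'(?P<level>ERROR|WARNING) (?P<logger>\S+) (?P<rest>.*)')

    def scan(lines):
        for line in lines:
            m = RECORD.search(line)
            if m:
                yield (float(m.group('ts')), m.group('level'),
                       m.group('logger'), m.group('rest'))

    sample = ('[ 2016.468815] env[62507]: ERROR nova.compute.manager '
              '[instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Instance '
              'failed to spawn: oslo_vmware.exceptions.VimFaultException: '
              'A specified parameter was not correct: fileType')
    print(next(scan([sample])))

Grouping the yielded tuples by the instance UUID embedded in the context is usually enough to reconstruct a per-instance timeline like the terminate sequence that follows.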
[ 2017.897374] env[62507]: INFO nova.scheduler.client.report [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Deleted allocations for instance 637de77e-d142-45ca-8a4e-3bf365e31502 [ 2017.917770] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a972ceb1-9dc1-4382-af97-203703f8f2bb tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "637de77e-d142-45ca-8a4e-3bf365e31502" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 649.895s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.919317] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f9c99faf-518b-48f2-ba71-bdeb291d25f2 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "637de77e-d142-45ca-8a4e-3bf365e31502" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 454.117s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.919547] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f9c99faf-518b-48f2-ba71-bdeb291d25f2 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Acquiring lock "637de77e-d142-45ca-8a4e-3bf365e31502-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.919956] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f9c99faf-518b-48f2-ba71-bdeb291d25f2 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "637de77e-d142-45ca-8a4e-3bf365e31502-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.920171] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f9c99faf-518b-48f2-ba71-bdeb291d25f2 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "637de77e-d142-45ca-8a4e-3bf365e31502-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.922013] env[62507]: INFO nova.compute.manager [None req-f9c99faf-518b-48f2-ba71-bdeb291d25f2 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Terminating instance [ 2017.923839] env[62507]: DEBUG nova.compute.manager [None req-f9c99faf-518b-48f2-ba71-bdeb291d25f2 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Start destroying the instance on the hypervisor. 
{{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2017.924067] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c99faf-518b-48f2-ba71-bdeb291d25f2 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2017.924591] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4fc5f02-6bd9-446b-902d-c5095064c5c7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.930522] env[62507]: DEBUG nova.compute.manager [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2017.937008] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333f5210-abc3-4a37-ba75-cbf9c69756aa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.968408] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-f9c99faf-518b-48f2-ba71-bdeb291d25f2 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 637de77e-d142-45ca-8a4e-3bf365e31502 could not be found. [ 2017.968600] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c99faf-518b-48f2-ba71-bdeb291d25f2 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2017.968777] env[62507]: INFO nova.compute.manager [None req-f9c99faf-518b-48f2-ba71-bdeb291d25f2 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2017.969048] env[62507]: DEBUG oslo.service.loopingcall [None req-f9c99faf-518b-48f2-ba71-bdeb291d25f2 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2017.971134] env[62507]: DEBUG nova.compute.manager [-] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2017.971233] env[62507]: DEBUG nova.network.neutron [-] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2017.985021] env[62507]: DEBUG oslo_concurrency.lockutils [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.985153] env[62507]: DEBUG oslo_concurrency.lockutils [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.986630] env[62507]: INFO nova.compute.claims [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2017.996234] env[62507]: DEBUG nova.network.neutron [-] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2018.006462] env[62507]: INFO nova.compute.manager [-] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] Took 0.04 seconds to deallocate network for instance. [ 2018.101017] env[62507]: DEBUG oslo_concurrency.lockutils [None req-f9c99faf-518b-48f2-ba71-bdeb291d25f2 tempest-ServerDiskConfigTestJSON-51376656 tempest-ServerDiskConfigTestJSON-51376656-project-member] Lock "637de77e-d142-45ca-8a4e-3bf365e31502" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.101857] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "637de77e-d142-45ca-8a4e-3bf365e31502" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 281.739s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.102062] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 637de77e-d142-45ca-8a4e-3bf365e31502] During sync_power_state the instance has a pending task (deleting). Skip. 
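The lockutils records just above also double as timing data: the _locked_do_build_and_run_instance lock on 637de77e-d142-45ca-8a4e-3bf365e31502 was held 649.895s, the do_terminate_instance lock waited 454.117s, and the power-state sync waited 281.739s before being skipped. One pattern recovers all of these from the 'acquired ... :: waited' and '"released" ... :: held' forms. (The doubled dots in targets such as build_and_run_instance.._locked_do_build_and_run_instance look like Python's .<locals>. qualname component with the angle-bracketed part dropped in transcription; the regex below treats the name as opaque either way.) A self-contained sketch:

    import re

    # Matches both forms seen above:
    #   Lock "<name>" acquired by "<target>" :: waited 454.117s
    #   Lock "<name>" "released" by "<target>" :: held 649.895s
    LOCK = re.compile(
        r'Lock "(?P<name>[^"]+)" "?(?P<event>acquired|released)"? by '
        r'"(?P<target>[^"]+)" :: (?P<kind>waited|held) (?P<secs>[\d.]+)s')

    def lock_times(text):
        for m in LOCK.finditer(text):
            yield (m.group('name'), m.group('event'),
                   m.group('kind'), float(m.group('secs')))

    sample = ('Lock "637de77e-d142-45ca-8a4e-3bf365e31502" "released" by '
              '"nova.compute.manager.ComputeManager.build_and_run_instance'
              '.._locked_do_build_and_run_instance" :: held 649.895s')
    print(list(lock_times(sample)))

Sorting by the held durations is a quick way to spot builds like this one, where the lock spanned the entire failed fetch-and-copy attempt.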
[ 2018.102238] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "637de77e-d142-45ca-8a4e-3bf365e31502" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.168615] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45023828-e717-4746-a079-74b7ece34465 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.176423] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c909623c-5e77-4e54-ba10-85847a1eb5d4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.205285] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d8ce1c-0570-48a3-9ea8-d6feee589f05 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.211796] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee63ccff-769a-4d9d-ae28-e665d02f5fac {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.224501] env[62507]: DEBUG nova.compute.provider_tree [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2018.232823] env[62507]: DEBUG nova.scheduler.client.report [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2018.247636] env[62507]: DEBUG oslo_concurrency.lockutils [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.262s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.248114] env[62507]: DEBUG nova.compute.manager [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Start building networks asynchronously for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2018.280768] env[62507]: DEBUG nova.compute.utils [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2018.282223] env[62507]: DEBUG nova.compute.manager [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Not allocating networking since 'none' was specified. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 2018.293218] env[62507]: DEBUG nova.compute.manager [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2018.352736] env[62507]: DEBUG nova.compute.manager [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2018.378220] env[62507]: DEBUG nova.virt.hardware [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2018.378470] env[62507]: DEBUG nova.virt.hardware [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2018.378629] env[62507]: DEBUG nova.virt.hardware [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2018.378812] env[62507]: DEBUG nova.virt.hardware [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2018.378959] env[62507]: DEBUG nova.virt.hardware [None 
req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2018.379118] env[62507]: DEBUG nova.virt.hardware [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2018.379322] env[62507]: DEBUG nova.virt.hardware [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2018.379480] env[62507]: DEBUG nova.virt.hardware [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2018.379680] env[62507]: DEBUG nova.virt.hardware [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2018.379836] env[62507]: DEBUG nova.virt.hardware [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2018.380021] env[62507]: DEBUG nova.virt.hardware [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2018.380909] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe708724-412a-4a52-b8bc-46b4196a6230 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.388480] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530b2301-f853-4a1f-8fc9-33f32e7681d7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.401534] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Instance VIF info [] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2018.407108] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Creating folder: Project (7fdcbd90e2f5494dbb9c0f9ffefb693e). Parent ref: group-v497991. 
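The hardware.py lines above trace CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image constraints the limits default to 65536 each, and the only factorization of one vCPU is sockets=1, cores=1, threads=1. The gist of that enumeration, sketched under those assumptions (not the nova.virt.hardware implementation):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # enumerate divisor triples whose product is exactly the vCPU count
        divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
        return [(s, c, t) for s, c, t in product(divisors, repeat=3)
                if s * c * t == vcpus
                and s <= max_sockets and c <= max_cores and t <= max_threads]

    print(possible_topologies(1))  # [(1, 1, 1)], matching "Got 1 possible topologies"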
{{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2018.407354] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94b80fe5-2da2-4532-ab73-065f9147b028 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.415280] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Created folder: Project (7fdcbd90e2f5494dbb9c0f9ffefb693e) in parent group-v497991. [ 2018.415461] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Creating folder: Instances. Parent ref: group-v498101. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2018.415658] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1f96e7a-c9bc-43b7-b202-6a5a99b7eddf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.422791] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Created folder: Instances in parent group-v498101. [ 2018.423014] env[62507]: DEBUG oslo.service.loopingcall [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2018.423201] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2018.423384] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d96d462-a63a-4a88-b3bc-cddf6e8f8542 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.438701] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2018.438701] env[62507]: value = "task-2460121" [ 2018.438701] env[62507]: _type = "Task" [ 2018.438701] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.445314] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460121, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.948818] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460121, 'name': CreateVM_Task, 'duration_secs': 0.239683} completed successfully. 
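CreateVM_Task above follows the standard oslo.vmware call shape: invoke the API, receive a Task moref ("task-2460121"), then poll it until a terminal state (success here after about 0.24 s). A generic polling loop in that spirit (a hypothetical helper; the real logic lives in oslo_vmware.api):

    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=60.0):
        # get_task_info() is assumed to return e.g. {'state': 'running', 'progress': 40}
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(interval)
        raise TimeoutError('task did not reach a terminal state in time')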
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.949048] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2018.949509] env[62507]: DEBUG oslo_concurrency.lockutils [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.949679] env[62507]: DEBUG oslo_concurrency.lockutils [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.949987] env[62507]: DEBUG oslo_concurrency.lockutils [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2018.950239] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd5dd351-fd06-41a4-9486-4a483a059720 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.954472] env[62507]: DEBUG oslo_vmware.api [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Waiting for the task: (returnval){ [ 2018.954472] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5293876e-58d5-00a1-35eb-2a41bf0740dc" [ 2018.954472] env[62507]: _type = "Task" [ 2018.954472] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.961344] env[62507]: DEBUG oslo_vmware.api [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]5293876e-58d5-00a1-35eb-2a41bf0740dc, 'name': SearchDatastore_Task} progress is 0%. 
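The lock acquisitions and the SearchDatastore_Task just above are the image-cache fast path: serialize on the cached VMDK's datastore path, probe for it, and fetch only on a miss. A self-contained sketch of that pattern, with the datastore probe and fetch stubbed out (illustrative, not nova.virt.vmwareapi.vmops):

    from oslo_concurrency import lockutils

    def _datastore_has(path):            # stub for the SearchDatastore_Task probe
        return False

    def _download_and_cache(path):       # stub for fetch + CopyVirtualDisk_Task
        print('fetching image into', path)

    @lockutils.synchronized('devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1.vmdk')
    def fetch_image_if_missing(cache_path):
        # only one worker fetches; the rest block on the lock, then hit the cache
        if not _datastore_has(cache_path):
            _download_and_cache(cache_path)
        return cache_path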
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.465019] env[62507]: DEBUG oslo_concurrency.lockutils [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.465347] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2019.465549] env[62507]: DEBUG oslo_concurrency.lockutils [None req-395f5840-8c0f-4e2d-bc2e-1fc1782f1617 tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2020.163926] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.167510] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.167846] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2020.167846] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2020.188368] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2020.188518] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2020.188655] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2020.188787] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2020.188928] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2020.189245] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2020.189401] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a76d0987-29c2-423b-972c-990639986d5f] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2020.189530] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2020.189655] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2020.189778] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2020.189902] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2021.168197] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.168581] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.168581] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2022.168798] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2023.168638] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2024.167596] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2024.167938] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2024.178856] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.179085] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.179257] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.179414] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2024.180591] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1347f9-0ccd-4ac1-9a18-5f9fbded2c98 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.189399] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69387c04-3bf5-4e77-9222-e22eb25a252e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.202930] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53af4ff3-d33d-4f81-b6c7-0ea727440934 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.210049] env[62507]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d7f2b9-b3c7-48d3-b171-9e364b4c8433 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.239280] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181173MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2024.239423] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.239626] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.313894] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.314082] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f257db53-3c5f-4dfc-bd45-9f2b27b49401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.314219] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.314344] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1c56d4af-ba43-4141-86d6-880ff384041e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.314466] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 39471434-14af-468a-8b55-5fc58957e7b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.314588] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 16295fdd-45d6-492f-99d9-1006ec42c097 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.314711] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a76d0987-29c2-423b-972c-990639986d5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.314833] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 56c176ec-c6e5-4f48-a5be-badef25c5667 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.314973] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 425b5171-97c2-4700-ad5f-c79aadb39eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.315125] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 479a1e78-23c0-4a96-aa72-aa419c8c251b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.326041] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7adef000-4700-4c2f-a7ea-09baf40cedf5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
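Each of the ten "actively managed" allocations above pins 1 VCPU, 128 MB and 1 GB (the m1.nano footprint), while the eleventh instance (7adef000) is scheduled but not yet started, so its allocation is left alone. Summing the ten with the 512 MB host reservation reproduces the "Final resource view" totals reported just below, assuming used_ram includes the reservation as Nova's resource tracker does:

    allocations = [{'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}] * 10

    used_vcpus = sum(a['VCPU'] for a in allocations)              # 10
    used_ram   = 512 + sum(a['MEMORY_MB'] for a in allocations)   # 512 + 1280 = 1792 MB
    used_disk  = sum(a['DISK_GB'] for a in allocations)           # 10 GB
    print(used_vcpus, used_ram, used_disk)                        # 10 1792 10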
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2024.326291] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2024.326409] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2024.344425] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing inventories for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2024.358725] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Updating ProviderTree inventory for provider 40e67440-0925-46e5-9b58-6e63187cdfab from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2024.358919] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Updating inventory in ProviderTree for provider 40e67440-0925-46e5-9b58-6e63187cdfab with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2024.370641] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing aggregate associations for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab, aggregates: None {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2024.390419] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing trait associations for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2024.526861] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b53237f-0369-4d9c-8fa8-c65a3bd44ac8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.534136] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-64651998-ee8c-49e1-87ba-733047ab6c17 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.563333] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437b7247-e67e-4392-a7e0-af4b943db9fa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.569868] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7388b675-9ef1-4420-bf8c-bf0bf3bbb600 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.584329] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2024.592583] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2024.605597] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2024.605733] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.366s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.167949] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2031.168232] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Cleaning up deleted instances with incomplete migration {{(pid=62507) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2031.177021] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2039.184107] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2039.184444] env[62507]: DEBUG nova.compute.manager 
[None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Cleaning up deleted instances {{(pid=62507) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2039.193785] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] There are 0 instances to clean {{(pid=62507) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2066.179633] env[62507]: WARNING oslo_vmware.rw_handles [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2066.179633] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2066.179633] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2066.179633] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2066.179633] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2066.179633] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 2066.179633] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2066.179633] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2066.179633] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2066.179633] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2066.179633] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2066.179633] env[62507]: ERROR oslo_vmware.rw_handles [ 2066.180504] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/0ea780a8-a0e4-4ff3-881f-fd6d93a8435e/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2066.182561] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2066.182814] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Copying Virtual Disk [datastore2] vmware_temp/0ea780a8-a0e4-4ff3-881f-fd6d93a8435e/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/0ea780a8-a0e4-4ff3-881f-fd6d93a8435e/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2066.183144] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ac09bf0-2fcc-46d9-bc07-4f8e84a2e5ec {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.191236] env[62507]: DEBUG oslo_vmware.api [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for the task: (returnval){ [ 2066.191236] env[62507]: value = "task-2460122" [ 2066.191236] env[62507]: _type = "Task" [ 2066.191236] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.199220] env[62507]: DEBUG oslo_vmware.api [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Task: {'id': task-2460122, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.701978] env[62507]: DEBUG oslo_vmware.exceptions [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2066.702292] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2066.702899] env[62507]: ERROR nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2066.702899] env[62507]: Faults: ['InvalidArgument'] [ 2066.702899] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Traceback (most recent call last): [ 2066.702899] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2066.702899] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] yield resources [ 2066.702899] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2066.702899] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] self.driver.spawn(context, instance, image_meta, [ 2066.702899] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2066.702899] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2066.702899] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2066.702899] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] self._fetch_image_if_missing(context, vi) [ 2066.702899] env[62507]: ERROR nova.compute.manager 
[instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2066.702899] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] image_cache(vi, tmp_image_ds_loc) [ 2066.703281] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2066.703281] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] vm_util.copy_virtual_disk( [ 2066.703281] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2066.703281] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] session._wait_for_task(vmdk_copy_task) [ 2066.703281] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2066.703281] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] return self.wait_for_task(task_ref) [ 2066.703281] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2066.703281] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] return evt.wait() [ 2066.703281] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2066.703281] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] result = hub.switch() [ 2066.703281] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2066.703281] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] return self.greenlet.switch() [ 2066.703281] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2066.703568] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] self.f(*self.args, **self.kw) [ 2066.703568] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2066.703568] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] raise exceptions.translate_fault(task_info.error) [ 2066.703568] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2066.703568] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Faults: ['InvalidArgument'] [ 2066.703568] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] [ 2066.703568] env[62507]: INFO nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] 
[instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Terminating instance [ 2066.704901] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.705137] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2066.705587] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-800ef93f-128b-43d7-84dc-ce534c17104e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.707581] env[62507]: DEBUG nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2066.708172] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2066.708508] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e28ad8-67fa-4af4-a89e-02a684df5645 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.715433] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2066.715642] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef2d0695-1d29-4a83-bf0d-01562205ffe5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.717772] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2066.717945] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2066.718871] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e98dc554-15b9-4392-9787-0f09b2005851 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.723307] env[62507]: DEBUG oslo_vmware.api [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Waiting for the task: (returnval){ [ 2066.723307] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]526b6451-57f6-8a59-d54f-5c06e3555a35" [ 2066.723307] env[62507]: _type = "Task" [ 2066.723307] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.731600] env[62507]: DEBUG oslo_vmware.api [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]526b6451-57f6-8a59-d54f-5c06e3555a35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.789523] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2066.789904] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2066.789904] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Deleting the datastore file [datastore2] fb7f3a79-bd28-48b9-9a64-db1750b0f716 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2066.790232] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64ee624d-7dca-4da4-bcf8-d336cb022542 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.796675] env[62507]: DEBUG oslo_vmware.api [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for the task: (returnval){ [ 2066.796675] env[62507]: value = "task-2460124" [ 2066.796675] env[62507]: _type = "Task" [ 2066.796675] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.804061] env[62507]: DEBUG oslo_vmware.api [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Task: {'id': task-2460124, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.233888] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2067.234307] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Creating directory with path [datastore2] vmware_temp/3856926d-c116-4591-8777-992529a7cd7b/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2067.234439] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e61ffec3-4762-48d7-8e76-b3ba51b1abec {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.246383] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Created directory with path [datastore2] vmware_temp/3856926d-c116-4591-8777-992529a7cd7b/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2067.246559] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Fetch image to [datastore2] vmware_temp/3856926d-c116-4591-8777-992529a7cd7b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2067.246729] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/3856926d-c116-4591-8777-992529a7cd7b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2067.247476] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded7956a-31ec-48ae-8075-6a510caacb3f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.253743] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4f888e-6bda-4380-864e-967ea2600b35 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.262401] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8511285e-813f-471a-9f6e-d5f30deb9aa2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.291998] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7cd4b023-5c1e-4e8d-9c03-eca9b6648dfb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.299861] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-db537e8d-f44a-4265-9e3c-251bd7e92443 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.305634] env[62507]: DEBUG oslo_vmware.api [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Task: {'id': task-2460124, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068073} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.305855] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2067.306048] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2067.306234] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2067.306405] env[62507]: INFO nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Took 0.60 seconds to destroy the instance on the hypervisor. 
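The teardown just completed (UnregisterVM, DeleteDatastoreFile_Task, "Instance destroyed") is driven by the InvalidArgument traceback earlier: the task poller translates the vCenter fault into a VimFaultException, and the compute manager responds by destroying the half-built VM and aborting its resource claim (next entries). The shape of that catch site, sketched rather than quoted from Nova:

    from oslo_vmware import exceptions as vexc

    def cache_sparse_image(session, vmdk_copy_task):
        try:
            session.wait_for_task(vmdk_copy_task)
        except vexc.VimFaultException as e:
            # e.fault_list carries the raw fault names, e.g. ['InvalidArgument'];
            # letting it propagate hands control to _build_and_run_instance,
            # which destroys the VM and aborts the claim, as logged here.
            print('CopyVirtualDisk failed:', e.fault_list)
            raise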
[ 2067.308511] env[62507]: DEBUG nova.compute.claims [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2067.308679] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2067.308924] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.322526] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2067.453922] env[62507]: DEBUG oslo_vmware.rw_handles [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3856926d-c116-4591-8777-992529a7cd7b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2067.514985] env[62507]: DEBUG oslo_vmware.rw_handles [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2067.515229] env[62507]: DEBUG oslo_vmware.rw_handles [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3856926d-c116-4591-8777-992529a7cd7b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
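The rw_handles entries above show the transfer mechanics: after SessionManager.AcquireGenericServiceTicket (previous block), the 21,318,656-byte sparse VMDK is streamed over HTTPS to the datastore's /folder/ endpoint. A hypothetical stand-alone sketch of such an upload; the ticket cookie name and chunk size are assumptions, and the real implementation is oslo_vmware.rw_handles:

    import requests  # assumption: not part of the logged stack

    URL = ('https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/'
           'vmware_temp/3856926d-c116-4591-8777-992529a7cd7b/'
           '601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk'
           '?dcPath=ha-datacenter&dsName=datastore2')

    def stream_vmdk(url, vmdk_path, ticket, chunk_size=64 * 1024):
        def chunks():
            with open(vmdk_path, 'rb') as f:
                while chunk := f.read(chunk_size):
                    yield chunk
        resp = requests.put(url, data=chunks(),
                            headers={'Content-Type': 'application/octet-stream'},
                            cookies={'vmware_cgi_ticket': ticket})  # cookie name assumed
        resp.raise_for_status()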
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2067.536571] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb30427-ade9-4fb5-a9dc-21b47e83914c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.544144] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a16025-f880-4141-a528-f46794c040bd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.574938] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-596a8afe-2a0d-4356-bdbd-08c785aa798b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.582202] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7933fb-a767-4ec7-8e96-da16af42b9d0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.595240] env[62507]: DEBUG nova.compute.provider_tree [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2067.603459] env[62507]: DEBUG nova.scheduler.client.report [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2067.616726] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.307s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.616868] env[62507]: ERROR nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2067.616868] env[62507]: Faults: ['InvalidArgument'] [ 2067.616868] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Traceback (most recent call last): [ 2067.616868] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2067.616868] env[62507]: ERROR nova.compute.manager [instance: 
fb7f3a79-bd28-48b9-9a64-db1750b0f716] self.driver.spawn(context, instance, image_meta, [ 2067.616868] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2067.616868] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2067.616868] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2067.616868] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] self._fetch_image_if_missing(context, vi) [ 2067.616868] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2067.616868] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] image_cache(vi, tmp_image_ds_loc) [ 2067.616868] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2067.617248] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] vm_util.copy_virtual_disk( [ 2067.617248] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2067.617248] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] session._wait_for_task(vmdk_copy_task) [ 2067.617248] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2067.617248] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] return self.wait_for_task(task_ref) [ 2067.617248] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2067.617248] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] return evt.wait() [ 2067.617248] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2067.617248] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] result = hub.switch() [ 2067.617248] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2067.617248] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] return self.greenlet.switch() [ 2067.617248] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2067.617248] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] self.f(*self.args, **self.kw) [ 2067.617717] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2067.617717] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] raise exceptions.translate_fault(task_info.error) [ 2067.617717] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2067.617717] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Faults: ['InvalidArgument'] [ 2067.617717] env[62507]: ERROR nova.compute.manager [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] [ 2067.617717] env[62507]: DEBUG nova.compute.utils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2067.619044] env[62507]: DEBUG nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Build of instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 was re-scheduled: A specified parameter was not correct: fileType [ 2067.619044] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2067.619421] env[62507]: DEBUG nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2067.619594] env[62507]: DEBUG nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2067.619770] env[62507]: DEBUG nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2067.619943] env[62507]: DEBUG nova.network.neutron [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2067.930446] env[62507]: DEBUG nova.network.neutron [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.941724] env[62507]: INFO nova.compute.manager [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Took 0.32 seconds to deallocate network for instance. [ 2068.029840] env[62507]: INFO nova.scheduler.client.report [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Deleted allocations for instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 [ 2068.051310] env[62507]: DEBUG oslo_concurrency.lockutils [None req-3a42ef23-9bd6-4bcf-801c-0b760bc0376e tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 681.222s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.052444] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a09fcb0c-3e36-4a0b-93fa-38d6679ee4a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 486.078s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.052675] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a09fcb0c-3e36-4a0b-93fa-38d6679ee4a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquiring lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.053586] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a09fcb0c-3e36-4a0b-93fa-38d6679ee4a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.053586] env[62507]:
DEBUG oslo_concurrency.lockutils [None req-a09fcb0c-3e36-4a0b-93fa-38d6679ee4a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.055081] env[62507]: INFO nova.compute.manager [None req-a09fcb0c-3e36-4a0b-93fa-38d6679ee4a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Terminating instance [ 2068.056836] env[62507]: DEBUG nova.compute.manager [None req-a09fcb0c-3e36-4a0b-93fa-38d6679ee4a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2068.056916] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a09fcb0c-3e36-4a0b-93fa-38d6679ee4a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2068.057343] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a9a4ddfd-9f6e-4157-896b-b1b120c52e6b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.063349] env[62507]: DEBUG nova.compute.manager [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2068.069591] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6579298-6dbe-4f36-967f-7118962fdfec {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.100313] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-a09fcb0c-3e36-4a0b-93fa-38d6679ee4a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fb7f3a79-bd28-48b9-9a64-db1750b0f716 could not be found. [ 2068.100529] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-a09fcb0c-3e36-4a0b-93fa-38d6679ee4a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2068.100711] env[62507]: INFO nova.compute.manager [None req-a09fcb0c-3e36-4a0b-93fa-38d6679ee4a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Took 0.04 seconds to destroy the instance on the hypervisor.
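The Acquiring/acquired/"released" triplets that dominate these lines are emitted by oslo_concurrency's lockutils, not by Nova itself: the inner() frames come from the synchronized decorator and the lock() frames from the context manager. A minimal sketch of both forms with hypothetical function and lock names; the waited/held figures in the records above (e.g. do_terminate_instance waiting 486s on the per-instance lock while the build held it) are how contention surfaces in these messages.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs with the named in-process lock held; lockutils' inner() wrapper
        # emits the "Acquiring lock ... by ..." / 'acquired ... waited' /
        # '"released" ... held' DEBUG lines seen throughout this log.
        pass

    def refresh_cache(instance_uuid):
        # Context-manager form, as used for the "refresh_cache-<uuid>" locks.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass

    claim_resources()
    refresh_cache('7adef000-4700-4c2f-a7ea-09baf40cedf5')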
[ 2068.100957] env[62507]: DEBUG oslo.service.loopingcall [None req-a09fcb0c-3e36-4a0b-93fa-38d6679ee4a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2068.103204] env[62507]: DEBUG nova.compute.manager [-] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2068.103308] env[62507]: DEBUG nova.network.neutron [-] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2068.117757] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.117992] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.119432] env[62507]: INFO nova.compute.claims [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2068.142428] env[62507]: DEBUG nova.network.neutron [-] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.158344] env[62507]: INFO nova.compute.manager [-] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] Took 0.05 seconds to deallocate network for instance. [ 2068.244411] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a09fcb0c-3e36-4a0b-93fa-38d6679ee4a5 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.192s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.245254] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 331.883s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.245459] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: fb7f3a79-bd28-48b9-9a64-db1750b0f716] During sync_power_state the instance has a pending task (deleting). Skip.
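For reference, the inventory payload the report client logs for provider 40e67440-0925-46e5-9b58-6e63187cdfab converts to schedulable capacity as (total - reserved) * allocation_ratio per resource class, with max_unit capping any single allocation. A worked sketch with the exact figures from this log; the capacity() helper is illustrative, not Nova code.

    # Inventory as reported for provider 40e67440-0925-46e5-9b58-6e63187cdfab.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 145},
    }

    def capacity(inv):
        # Placement's effective-capacity rule: (total - reserved) * allocation_ratio.
        return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

    for rc, inv in sorted(inventory.items()):
        print('%s: %d schedulable (<= %d per allocation)' % (rc, capacity(inv), inv['max_unit']))
    # DISK_GB: 400 schedulable (<= 145 per allocation)
    # MEMORY_MB: 196078 schedulable (<= 65530 per allocation)
    # VCPU: 192 schedulable (<= 16 per allocation)

Each claim in this run consumes {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1} (the m1.nano flavor), which is why successive claims keep succeeding while the reported inventory itself never changes.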
[ 2068.246073] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "fb7f3a79-bd28-48b9-9a64-db1750b0f716" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.290412] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ebcd9c-cdf0-4114-bb43-e23e79cc148f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.298447] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e245092c-e596-4d47-b211-bdeee5b77a27 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.327869] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f29329-10a0-4cc8-a1c3-ba93ae18e01c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.334525] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd43817-aa6c-40b6-a9ae-6b150eab005c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.347070] env[62507]: DEBUG nova.compute.provider_tree [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2068.355596] env[62507]: DEBUG nova.scheduler.client.report [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2068.368741] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.251s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.369209] env[62507]: DEBUG nova.compute.manager [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Start building networks asynchronously for instance.
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2068.402487] env[62507]: DEBUG nova.compute.utils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2068.403996] env[62507]: DEBUG nova.compute.manager [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2068.404200] env[62507]: DEBUG nova.network.neutron [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2068.414773] env[62507]: DEBUG nova.compute.manager [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2068.470683] env[62507]: DEBUG nova.policy [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8187d3d405c244f995763c4d67515b6a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c850b58d9b554e81b09f26703a6f50f1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 2068.474710] env[62507]: DEBUG nova.compute.manager [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2068.500392] env[62507]: DEBUG nova.virt.hardware [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2068.500643] env[62507]: DEBUG nova.virt.hardware [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2068.500801] env[62507]: DEBUG nova.virt.hardware [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2068.501081] env[62507]: DEBUG nova.virt.hardware [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2068.501258] env[62507]: DEBUG nova.virt.hardware [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2068.501408] env[62507]: DEBUG nova.virt.hardware [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2068.501613] env[62507]: DEBUG nova.virt.hardware [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2068.501775] env[62507]: DEBUG nova.virt.hardware [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2068.501942] env[62507]: DEBUG nova.virt.hardware [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 
tempest-ServersTestJSON-398374741-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2068.502117] env[62507]: DEBUG nova.virt.hardware [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2068.502298] env[62507]: DEBUG nova.virt.hardware [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2068.503219] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299cf109-764d-4057-a10b-85ac97caa62e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.511447] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5541e7-cbab-4897-8399-68381908de94 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.818800] env[62507]: DEBUG nova.network.neutron [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Successfully created port: c644a827-a835-496f-a819-e48e338b2456 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2069.438414] env[62507]: DEBUG nova.network.neutron [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Successfully updated port: c644a827-a835-496f-a819-e48e338b2456 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2069.450458] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "refresh_cache-7adef000-4700-4c2f-a7ea-09baf40cedf5" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2069.450666] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired lock "refresh_cache-7adef000-4700-4c2f-a7ea-09baf40cedf5" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2069.450827] env[62507]: DEBUG nova.network.neutron [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2069.494109] env[62507]: DEBUG nova.network.neutron [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2069.919595] env[62507]: DEBUG nova.network.neutron [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Updating instance_info_cache with network_info: [{"id": "c644a827-a835-496f-a819-e48e338b2456", "address": "fa:16:3e:6f:62:04", "network": {"id": "a17cad1d-200f-41fe-b1b3-5a098d4c4317", "bridge": "br-int", "label": "tempest-ServersTestJSON-1356127193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c850b58d9b554e81b09f26703a6f50f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc644a827-a8", "ovs_interfaceid": "c644a827-a835-496f-a819-e48e338b2456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2069.932368] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Releasing lock "refresh_cache-7adef000-4700-4c2f-a7ea-09baf40cedf5" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2069.932658] env[62507]: DEBUG nova.compute.manager [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Instance network_info: |[{"id": "c644a827-a835-496f-a819-e48e338b2456", "address": "fa:16:3e:6f:62:04", "network": {"id": "a17cad1d-200f-41fe-b1b3-5a098d4c4317", "bridge": "br-int", "label": "tempest-ServersTestJSON-1356127193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c850b58d9b554e81b09f26703a6f50f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc644a827-a8", "ovs_interfaceid": "c644a827-a835-496f-a819-e48e338b2456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2069.933423] env[62507]: 
DEBUG nova.virt.vmwareapi.vmops [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:62:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '489b2441-7132-4942-8b61-49cf0ad4400e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c644a827-a835-496f-a819-e48e338b2456', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2069.941167] env[62507]: DEBUG oslo.service.loopingcall [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2069.941682] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2069.942299] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3659148-d78d-4550-a5f0-0d72648bc034 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.963302] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2069.963302] env[62507]: value = "task-2460125" [ 2069.963302] env[62507]: _type = "Task" [ 2069.963302] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.973749] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460125, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.975875] env[62507]: DEBUG nova.compute.manager [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Received event network-vif-plugged-c644a827-a835-496f-a819-e48e338b2456 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2069.976140] env[62507]: DEBUG oslo_concurrency.lockutils [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] Acquiring lock "7adef000-4700-4c2f-a7ea-09baf40cedf5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.976395] env[62507]: DEBUG oslo_concurrency.lockutils [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] Lock "7adef000-4700-4c2f-a7ea-09baf40cedf5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.976569] env[62507]: DEBUG oslo_concurrency.lockutils [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] Lock "7adef000-4700-4c2f-a7ea-09baf40cedf5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.976737] env[62507]: DEBUG nova.compute.manager [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] No waiting events found dispatching network-vif-plugged-c644a827-a835-496f-a819-e48e338b2456 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2069.976903] env[62507]: WARNING nova.compute.manager [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Received unexpected event network-vif-plugged-c644a827-a835-496f-a819-e48e338b2456 for instance with vm_state building and task_state spawning. [ 2069.977085] env[62507]: DEBUG nova.compute.manager [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Received event network-changed-c644a827-a835-496f-a819-e48e338b2456 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2069.978021] env[62507]: DEBUG nova.compute.manager [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Refreshing instance network info cache due to event network-changed-c644a827-a835-496f-a819-e48e338b2456.
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2069.978021] env[62507]: DEBUG oslo_concurrency.lockutils [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] Acquiring lock "refresh_cache-7adef000-4700-4c2f-a7ea-09baf40cedf5" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2069.978021] env[62507]: DEBUG oslo_concurrency.lockutils [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] Acquired lock "refresh_cache-7adef000-4700-4c2f-a7ea-09baf40cedf5" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2069.978021] env[62507]: DEBUG nova.network.neutron [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Refreshing network info cache for port c644a827-a835-496f-a819-e48e338b2456 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2070.269513] env[62507]: DEBUG nova.network.neutron [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Updated VIF entry in instance network info cache for port c644a827-a835-496f-a819-e48e338b2456. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2070.269875] env[62507]: DEBUG nova.network.neutron [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Updating instance_info_cache with network_info: [{"id": "c644a827-a835-496f-a819-e48e338b2456", "address": "fa:16:3e:6f:62:04", "network": {"id": "a17cad1d-200f-41fe-b1b3-5a098d4c4317", "bridge": "br-int", "label": "tempest-ServersTestJSON-1356127193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c850b58d9b554e81b09f26703a6f50f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc644a827-a8", "ovs_interfaceid": "c644a827-a835-496f-a819-e48e338b2456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2070.279117] env[62507]: DEBUG oslo_concurrency.lockutils [req-82e56d21-c661-4d0a-beb0-bfcca6a56967 req-1d45ad24-815b-4463-92d2-57ad34436322 service nova] Releasing lock "refresh_cache-7adef000-4700-4c2f-a7ea-09baf40cedf5" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2070.472878] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460125, 'name': CreateVM_Task, 'duration_secs': 0.293137} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.473222] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2070.473793] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2070.473958] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2070.474468] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2070.474873] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-289b4991-316b-4afb-a004-2afe81b0e5eb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.479037] env[62507]: DEBUG oslo_vmware.api [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for the task: (returnval){ [ 2070.479037] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]526a12ea-4b34-b4c0-ab3c-52fdd65e689d" [ 2070.479037] env[62507]: _type = "Task" [ 2070.479037] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.487170] env[62507]: DEBUG oslo_vmware.api [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]526a12ea-4b34-b4c0-ab3c-52fdd65e689d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.991506] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2070.992238] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2070.992238] env[62507]: DEBUG oslo_concurrency.lockutils [None req-67d27ea1-a8b3-44f5-b3cc-52a786ff9037 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.177208] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2080.164216] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2080.166783] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2080.166935] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2080.167080] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2080.189012] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2080.189172] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2080.189309] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2080.189440] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2080.189566] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2080.189694] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a76d0987-29c2-423b-972c-990639986d5f] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2080.189907] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2080.189956] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2080.190055] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2080.190174] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2080.190293] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2081.168342] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.168624] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2082.168649] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.168813] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2084.163231] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2084.184739] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2084.184739] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2085.168408] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2085.179675] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.179881] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.180061] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.180219] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2085.181440] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af9791b-04c5-42ab-a629-dde2066edc97 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.190008] env[62507]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0fb56d-42c0-463d-9cd3-405eb8d6a4ea {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.203305] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17339c66-6650-49f1-aab7-dd37f108e83a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.209643] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bcbc0d7-e2d4-4966-a19c-5536a4219922 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.237410] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181166MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2085.237549] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.237737] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.326917] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance f257db53-3c5f-4dfc-bd45-9f2b27b49401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.327092] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.327226] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1c56d4af-ba43-4141-86d6-880ff384041e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.327351] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 39471434-14af-468a-8b55-5fc58957e7b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.327485] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 16295fdd-45d6-492f-99d9-1006ec42c097 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.327606] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a76d0987-29c2-423b-972c-990639986d5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.327722] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 56c176ec-c6e5-4f48-a5be-badef25c5667 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.327835] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 425b5171-97c2-4700-ad5f-c79aadb39eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.327949] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 479a1e78-23c0-4a96-aa72-aa419c8c251b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.328075] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7adef000-4700-4c2f-a7ea-09baf40cedf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.328261] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2085.328399] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2085.438885] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d451ba-2d90-47a6-8eb2-409f9fab880e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.446609] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673b5668-4a80-4f44-ab98-4362fd420147 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.475490] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bd133f-5a8d-4d94-a0fd-c8640a5b1eae {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.482482] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93f6ed8-464c-4110-b939-58765d85c002 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.495450] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2085.504564] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2085.518810] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2085.518810] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.281s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.303741] env[62507]: WARNING oslo_vmware.rw_handles [None 
req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2117.303741] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2117.303741] env[62507]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2117.303741] env[62507]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 2117.303741] env[62507]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2117.303741] env[62507]: ERROR oslo_vmware.rw_handles     response.begin()
[ 2117.303741] env[62507]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2117.303741] env[62507]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 2117.303741] env[62507]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2117.303741] env[62507]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 2117.303741] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2117.303741] env[62507]: ERROR oslo_vmware.rw_handles
[ 2117.304480] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/3856926d-c116-4591-8777-992529a7cd7b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2117.306182] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2117.306443] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Copying Virtual Disk [datastore2] vmware_temp/3856926d-c116-4591-8777-992529a7cd7b/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/3856926d-c116-4591-8777-992529a7cd7b/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2117.306743] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c38af0eb-1751-4248-bf84-38c5da8a840b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2117.314225] env[62507]: DEBUG oslo_vmware.api [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Waiting for the task: (returnval){
[ 2117.314225] env[62507]: value = "task-2460126"
[ 2117.314225] env[62507]: _type = "Task"
[ 2117.314225] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2117.323013] env[62507]: DEBUG oslo_vmware.api [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Task: {'id': task-2460126, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2117.824839] env[62507]: DEBUG oslo_vmware.exceptions [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2117.825210] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2117.825822] env[62507]: ERROR nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2117.825822] env[62507]: Faults: ['InvalidArgument']
[ 2117.825822] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Traceback (most recent call last):
[ 2117.825822] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2117.825822] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     yield resources
[ 2117.825822] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2117.825822] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     self.driver.spawn(context, instance, image_meta,
[ 2117.825822] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2117.825822] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2117.825822] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2117.825822] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     self._fetch_image_if_missing(context, vi)
[ 2117.825822] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2117.826303] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     image_cache(vi, tmp_image_ds_loc)
[ 2117.826303] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2117.826303] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     vm_util.copy_virtual_disk(
[ 2117.826303] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2117.826303] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     session._wait_for_task(vmdk_copy_task)
[ 2117.826303] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2117.826303] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     return self.wait_for_task(task_ref)
[ 2117.826303] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2117.826303] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     return evt.wait()
[ 2117.826303] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2117.826303] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     result = hub.switch()
[ 2117.826303] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2117.826303] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     return self.greenlet.switch()
[ 2117.826598] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2117.826598] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     self.f(*self.args, **self.kw)
[ 2117.826598] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2117.826598] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     raise exceptions.translate_fault(task_info.error)
[ 2117.826598] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2117.826598] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Faults: ['InvalidArgument']
[ 2117.826598] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]
[ 2117.826598] env[62507]: INFO nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Terminating instance
[ 2117.827726] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Acquired lock
"[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.827934] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2117.828188] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14dc6240-5248-4f93-b7fc-cd1c83cd69bd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.830429] env[62507]: DEBUG nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2117.830622] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2117.831322] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43fbb639-0dd6-4f24-8b89-eaa786fe0fdd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.837796] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2117.838012] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec618157-ff17-4222-b035-ca503c4aaed8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.840200] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2117.840378] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2117.841287] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7626b3b8-02ce-42a1-ab38-2e8824d16616 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2117.846044] env[62507]: DEBUG oslo_vmware.api [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Waiting for the task: (returnval){
[ 2117.846044] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f392f5-4aec-af21-12b7-300652deafc6"
[ 2117.846044] env[62507]: _type = "Task"
[ 2117.846044] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2117.852822] env[62507]: DEBUG oslo_vmware.api [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52f392f5-4aec-af21-12b7-300652deafc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2117.910868] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2117.911106] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2117.911270] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Deleting the datastore file [datastore2] f257db53-3c5f-4dfc-bd45-9f2b27b49401 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2117.911523] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17e9ef03-30e6-48b2-8705-6f9ec0422ad6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2117.918109] env[62507]: DEBUG oslo_vmware.api [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Waiting for the task: (returnval){
[ 2117.918109] env[62507]: value = "task-2460128"
[ 2117.918109] env[62507]: _type = "Task"
[ 2117.918109] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2117.925956] env[62507]: DEBUG oslo_vmware.api [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Task: {'id': task-2460128, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.356377] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2118.356692] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Creating directory with path [datastore2] vmware_temp/57a7d2ff-c0f0-44a2-804e-beb78a32108f/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2118.356881] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f695593-0e10-4924-8b5f-4dd54f0ab0cf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.367528] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Created directory with path [datastore2] vmware_temp/57a7d2ff-c0f0-44a2-804e-beb78a32108f/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2118.367710] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Fetch image to [datastore2] vmware_temp/57a7d2ff-c0f0-44a2-804e-beb78a32108f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2118.367877] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/57a7d2ff-c0f0-44a2-804e-beb78a32108f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2118.368556] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d9d99a-d6cf-4ced-a551-938f0bbc0fbf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.374691] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2854df94-f185-4431-a12f-9e2ad84c0381 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.383197] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a76374b-e899-405c-9939-a89849bdecb8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.412678] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6909a259-0b21-40db-a386-008c00abc7d2 {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.417806] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c17f7b9c-9d36-4386-9494-33795dd740c8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.427342] env[62507]: DEBUG oslo_vmware.api [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Task: {'id': task-2460128, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069468} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.427563] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2118.427740] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2118.427906] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2118.428095] env[62507]: INFO nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Took 0.60 seconds to destroy the instance on the hypervisor. 
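Annotation: the entries above record oslo.vmware's task pattern end to end. A vCenter *_Task method (CopyVirtualDisk_Task, DeleteDatastoreFile_Task) returns a task handle, and wait_for_task polls its state ("progress is 0%.") until it either completes ("completed successfully" with duration_secs) or carries an error that is translated into an exception such as the VimFaultException seen earlier. A minimal, self-contained sketch of that control flow; FakeTask, TaskFault and poll_task are illustrative stand-ins, not oslo.vmware names:

import time

class TaskFault(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

class FakeTask:
    """Stub for a vCenter task handle; yields (state, progress, error)."""
    def __init__(self, states):
        self._states = iter(states)
    def info(self):
        return next(self._states)

def poll_task(task, interval=0.5):
    # Mirrors the loop the log records: poll task info at a fixed interval,
    # logging progress, until a terminal state; an error is translated into
    # an exception (oslo.vmware raises exceptions.translate_fault(...)).
    while True:
        state, progress, error = task.info()
        if state == 'success':
            return
        if state == 'error':
            raise TaskFault(error)
        print(f"progress is {progress}%.")
        time.sleep(interval)

poll_task(FakeTask([('running', 0, None), ('success', 100, None)]), interval=0)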
[ 2118.430168] env[62507]: DEBUG nova.compute.claims [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2118.430347] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2118.430573] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2118.440771] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2118.491341] env[62507]: DEBUG oslo_vmware.rw_handles [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/57a7d2ff-c0f0-44a2-804e-beb78a32108f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2118.552868] env[62507]: DEBUG oslo_vmware.rw_handles [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2118.553073] env[62507]: DEBUG oslo_vmware.rw_handles [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/57a7d2ff-c0f0-44a2-804e-beb78a32108f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2118.651010] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08048c9-0531-406d-93fb-6b32656057b9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2118.658630] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cd28a7-198a-4495-97da-33c2bcc768de {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2118.687942] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444d7259-3104-4a51-99cb-8abe211f6b02 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2118.694325] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76835696-d5a5-4eac-b526-68cdfe47a9cd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2118.706675] env[62507]: DEBUG nova.compute.provider_tree [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2118.715141] env[62507]: DEBUG nova.scheduler.client.report [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2118.728269] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.298s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2118.728781] env[62507]: ERROR nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2118.728781] env[62507]: Faults: ['InvalidArgument']
[ 2118.728781] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Traceback (most recent call last):
[ 2118.728781] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2118.728781] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     self.driver.spawn(context, instance, image_meta,
[ 2118.728781] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2118.728781] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2118.728781] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2118.728781] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     self._fetch_image_if_missing(context, vi)
[ 2118.728781] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2118.728781] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     image_cache(vi, tmp_image_ds_loc)
[ 2118.728781] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2118.729119] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     vm_util.copy_virtual_disk(
[ 2118.729119] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2118.729119] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     session._wait_for_task(vmdk_copy_task)
[ 2118.729119] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2118.729119] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     return self.wait_for_task(task_ref)
[ 2118.729119] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2118.729119] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     return evt.wait()
[ 2118.729119] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2118.729119] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     result = hub.switch()
[ 2118.729119] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2118.729119] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     return self.greenlet.switch()
[ 2118.729119] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2118.729119] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     self.f(*self.args, **self.kw)
[ 2118.729464] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2118.729464] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]     raise exceptions.translate_fault(task_info.error)
[ 2118.729464] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2118.729464] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Faults: ['InvalidArgument']
[ 2118.729464] env[62507]: ERROR nova.compute.manager [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401]
[ 2118.729464] env[62507]: DEBUG nova.compute.utils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2118.731182] env[62507]: DEBUG nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Build of instance f257db53-3c5f-4dfc-bd45-9f2b27b49401 was re-scheduled: A specified parameter was not correct: fileType
[ 2118.731182] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2118.731560] env[62507]: DEBUG nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2118.731738] env[62507]: DEBUG nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2118.731911] env[62507]: DEBUG nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2118.732095] env[62507]: DEBUG nova.network.neutron [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2119.012653] env[62507]: DEBUG nova.network.neutron [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.026034] env[62507]: INFO nova.compute.manager [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Took 0.29 seconds to deallocate network for instance. [ 2119.125026] env[62507]: INFO nova.scheduler.client.report [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Deleted allocations for instance f257db53-3c5f-4dfc-bd45-9f2b27b49401 [ 2119.147105] env[62507]: DEBUG oslo_concurrency.lockutils [None req-368d7a9a-ae7e-4656-9bce-5034d4060470 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 546.664s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.147105] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 382.784s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.147262] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] During sync_power_state the instance has a pending task (spawning). Skip. 
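Annotation: the "Acquiring lock ... by ...", "acquired ... waited Ns" and ""released" ... held Ns" lines throughout this log come from oslo.concurrency's named locks, which serialize work such as the resource tracker's claim handling and the per-instance build lock released above after 546.664s. A short sketch of the same pattern, assuming oslo.concurrency is installed; the function names here are illustrative, not Nova's:

from oslo_concurrency import lockutils

# lockutils.lock() yields a lock keyed by name (process-internal by
# default, file-based with external=True); concurrent holders of the
# same name are serialized, which is what the waited/held figures measure.
def update_tracker():
    with lockutils.lock('compute_resources'):
        pass  # resource-tracker work happens under the named lock

# The decorator form used throughout Nova's compute manager:
@lockutils.synchronized('compute_resources')
def abort_claim():
    pass

update_tracker()
abort_claim()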
[ 2119.148572] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.148572] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7a503915-3522-43c5-a32d-28eb56376c13 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 350.737s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.148572] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7a503915-3522-43c5-a32d-28eb56376c13 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Acquiring lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.148572] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7a503915-3522-43c5-a32d-28eb56376c13 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.148846] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7a503915-3522-43c5-a32d-28eb56376c13 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.152319] env[62507]: INFO nova.compute.manager [None req-7a503915-3522-43c5-a32d-28eb56376c13 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Terminating instance [ 2119.154189] env[62507]: DEBUG nova.compute.manager [None req-7a503915-3522-43c5-a32d-28eb56376c13 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Start destroying the instance on the hypervisor. 
{{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2119.154394] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7a503915-3522-43c5-a32d-28eb56376c13 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2119.154898] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d86f9136-a20c-4837-9d96-21009d8e141c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.165295] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c821e7b9-056c-4f8b-8721-618f55d6dba4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.193618] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-7a503915-3522-43c5-a32d-28eb56376c13 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f257db53-3c5f-4dfc-bd45-9f2b27b49401 could not be found. [ 2119.193826] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7a503915-3522-43c5-a32d-28eb56376c13 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2119.194057] env[62507]: INFO nova.compute.manager [None req-7a503915-3522-43c5-a32d-28eb56376c13 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2119.194301] env[62507]: DEBUG oslo.service.loopingcall [None req-7a503915-3522-43c5-a32d-28eb56376c13 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2119.194547] env[62507]: DEBUG nova.compute.manager [-] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2119.194644] env[62507]: DEBUG nova.network.neutron [-] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2119.218477] env[62507]: DEBUG nova.network.neutron [-] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.226024] env[62507]: INFO nova.compute.manager [-] [instance: f257db53-3c5f-4dfc-bd45-9f2b27b49401] Took 0.03 seconds to deallocate network for instance. 
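Annotation: the "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" line above is oslo.service's looping-call helper driving the deallocation retries. A minimal sketch of that usage, assuming oslo.service is installed; the retry body and the three-attempt success are illustrative:

from oslo_service import loopingcall

attempts = {'n': 0}

def _deallocate_with_retries():
    # Raising LoopingCallDone stops the loop and hands a value back
    # to the wait() below; returning normally lets the loop re-run.
    attempts['n'] += 1
    if attempts['n'] >= 3:  # pretend the third try succeeds
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone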
[ 2119.314068] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7a503915-3522-43c5-a32d-28eb56376c13 tempest-AttachVolumeShelveTestJSON-435323496 tempest-AttachVolumeShelveTestJSON-435323496-project-member] Lock "f257db53-3c5f-4dfc-bd45-9f2b27b49401" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.166s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2139.518443] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.168672] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.169010] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2141.169010] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2141.187889] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2141.188063] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2141.188204] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2141.188335] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2141.188461] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a76d0987-29c2-423b-972c-990639986d5f] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2141.188590] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2141.188714] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2141.188836] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2141.188958] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2141.189092] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2142.167394] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2142.167660] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2142.167813] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2144.167396] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.167700] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.549920] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "33af3273-6d4b-435d-8c40-cdbac591a84f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.550170] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "33af3273-6d4b-435d-8c40-cdbac591a84f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2144.561292] env[62507]: DEBUG nova.compute.manager [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Starting instance...
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2144.611568] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.611793] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2144.613461] env[62507]: INFO nova.compute.claims [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2144.771020] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a8c1b6-e6d2-4701-adaf-0978cac55b10 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.776522] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626145fa-705d-46b4-b382-d98132d98ad3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.805604] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e1154a-63ba-4f0a-98f7-1908433815b3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.812391] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d9b984-c11f-467a-9c29-8a2937c82492 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.825333] env[62507]: DEBUG nova.compute.provider_tree [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2144.833485] env[62507]: DEBUG nova.scheduler.client.report [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2144.847317] env[62507]: DEBUG oslo_concurrency.lockutils [None 
req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.235s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2144.847780] env[62507]: DEBUG nova.compute.manager [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2144.879552] env[62507]: DEBUG nova.compute.utils [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2144.880692] env[62507]: DEBUG nova.compute.manager [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2144.880868] env[62507]: DEBUG nova.network.neutron [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2144.890573] env[62507]: DEBUG nova.compute.manager [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2144.942417] env[62507]: DEBUG nova.policy [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df0b12531a3e46e4a97a8d4082d6868e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13cb14d09e6f4d84996e4470f4e24eeb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 2144.951226] env[62507]: DEBUG nova.compute.manager [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2144.971360] env[62507]: DEBUG nova.virt.hardware [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2144.971599] env[62507]: DEBUG nova.virt.hardware [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2144.971760] env[62507]: DEBUG nova.virt.hardware [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2144.971944] env[62507]: DEBUG nova.virt.hardware [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2144.972103] env[62507]: DEBUG nova.virt.hardware [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2144.972255] env[62507]: DEBUG nova.virt.hardware [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2144.972460] env[62507]: DEBUG nova.virt.hardware [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2144.972634] env[62507]: DEBUG nova.virt.hardware [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2144.972809] env[62507]: DEBUG nova.virt.hardware [None 
req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2144.972977] env[62507]: DEBUG nova.virt.hardware [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2144.973171] env[62507]: DEBUG nova.virt.hardware [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2144.974009] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d9258c-bc22-489f-82f5-1e528920d4e9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.981756] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9599be81-839e-406e-8a54-1f3dd2f27110 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.167459] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2145.167735] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2145.167868] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2145.178448] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2145.178752] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2145.178922] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2145.179092] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 
None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2145.180174] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83972979-eb86-41a2-ae31-39ca0ed41026 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.188742] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2271544-06b1-41ba-9f03-b10e4b69d779 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.202406] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ffe139-aa41-43e3-91ee-0c8d8ad87a4a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.210230] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b60388-8393-4ea5-8776-aa9481887789 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.245731] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181177MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2145.245912] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2145.246316] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2145.248620] env[62507]: DEBUG nova.network.neutron [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Successfully created port: c8c29db1-0485-474e-a7b5-3cfd5822b640 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2145.328187] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2145.328357] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1c56d4af-ba43-4141-86d6-880ff384041e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2145.328486] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 39471434-14af-468a-8b55-5fc58957e7b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2145.328609] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 16295fdd-45d6-492f-99d9-1006ec42c097 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2145.328728] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a76d0987-29c2-423b-972c-990639986d5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2145.328847] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 56c176ec-c6e5-4f48-a5be-badef25c5667 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2145.328964] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 425b5171-97c2-4700-ad5f-c79aadb39eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2145.329096] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 479a1e78-23c0-4a96-aa72-aa419c8c251b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2145.329213] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7adef000-4700-4c2f-a7ea-09baf40cedf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2145.329327] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 33af3273-6d4b-435d-8c40-cdbac591a84f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2145.329524] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2145.329665] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2145.466539] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef339c86-776b-418e-817c-72eb7dc4b8f3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.474233] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54dd6b09-c728-4706-9588-a0a3e3740400 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.505048] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fad250f-d43c-4f4a-9a13-1ee3fcfe586c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.512442] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a0d6a1-e827-4cd0-99a7-662d79f31045 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.525729] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2145.536707] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2145.556316] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2145.557624] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.310s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2145.847640] env[62507]: DEBUG nova.compute.manager 
[req-24308600-0cd2-4569-a26c-e1b4eb0478ab req-876f7227-b48c-4f5d-a75d-d3df910f5275 service nova] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Received event network-vif-plugged-c8c29db1-0485-474e-a7b5-3cfd5822b640 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2145.847879] env[62507]: DEBUG oslo_concurrency.lockutils [req-24308600-0cd2-4569-a26c-e1b4eb0478ab req-876f7227-b48c-4f5d-a75d-d3df910f5275 service nova] Acquiring lock "33af3273-6d4b-435d-8c40-cdbac591a84f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2145.848114] env[62507]: DEBUG oslo_concurrency.lockutils [req-24308600-0cd2-4569-a26c-e1b4eb0478ab req-876f7227-b48c-4f5d-a75d-d3df910f5275 service nova] Lock "33af3273-6d4b-435d-8c40-cdbac591a84f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2145.848290] env[62507]: DEBUG oslo_concurrency.lockutils [req-24308600-0cd2-4569-a26c-e1b4eb0478ab req-876f7227-b48c-4f5d-a75d-d3df910f5275 service nova] Lock "33af3273-6d4b-435d-8c40-cdbac591a84f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2145.848457] env[62507]: DEBUG nova.compute.manager [req-24308600-0cd2-4569-a26c-e1b4eb0478ab req-876f7227-b48c-4f5d-a75d-d3df910f5275 service nova] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] No waiting events found dispatching network-vif-plugged-c8c29db1-0485-474e-a7b5-3cfd5822b640 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2145.848622] env[62507]: WARNING nova.compute.manager [req-24308600-0cd2-4569-a26c-e1b4eb0478ab req-876f7227-b48c-4f5d-a75d-d3df910f5275 service nova] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Received unexpected event network-vif-plugged-c8c29db1-0485-474e-a7b5-3cfd5822b640 for instance with vm_state building and task_state spawning.
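[editor's note] The "-events" lock traffic above is the per-instance external-event dispatch path: Neutron delivered network-vif-plugged before the driver registered a waiter for it, so pop_instance_event found nothing and the manager logged the "Received unexpected event" WARNING, which is harmless while the VM is still building/spawning. A minimal sketch of the oslo.concurrency pattern that produces the Acquiring/acquired/"released" DEBUG lines; the lock name mirrors the log, and the function body is an illustrative stand-in, not Nova's code.

    # The synchronized decorator emits the lockutils DEBUG records seen above
    # each time the wrapped function is called.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('33af3273-6d4b-435d-8c40-cdbac591a84f-events')
    def _pop_event():
        # Under the per-instance event lock, look up a waiter registered for
        # network-vif-plugged-c8c29db1-0485-474e-a7b5-3cfd5822b640; none exists
        # here, so the caller logs the unexpected-event WARNING instead of
        # waking a waiter.
        return None

    _pop_event()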
[ 2145.926472] env[62507]: DEBUG nova.network.neutron [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Successfully updated port: c8c29db1-0485-474e-a7b5-3cfd5822b640 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2145.940458] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "refresh_cache-33af3273-6d4b-435d-8c40-cdbac591a84f" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2145.940609] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired lock "refresh_cache-33af3273-6d4b-435d-8c40-cdbac591a84f" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2145.940763] env[62507]: DEBUG nova.network.neutron [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2145.982245] env[62507]: DEBUG nova.network.neutron [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2146.134694] env[62507]: DEBUG nova.network.neutron [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Updating instance_info_cache with network_info: [{"id": "c8c29db1-0485-474e-a7b5-3cfd5822b640", "address": "fa:16:3e:60:08:eb", "network": {"id": "0b8e342b-a3cc-46ec-b49a-d67e3624a14b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1312490043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cb14d09e6f4d84996e4470f4e24eeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c29db1-04", "ovs_interfaceid": "c8c29db1-0485-474e-a7b5-3cfd5822b640", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2146.147365] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Releasing lock "refresh_cache-33af3273-6d4b-435d-8c40-cdbac591a84f" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2146.147638] env[62507]: DEBUG nova.compute.manager [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Instance network_info: |[{"id": "c8c29db1-0485-474e-a7b5-3cfd5822b640", "address": "fa:16:3e:60:08:eb", "network": {"id": "0b8e342b-a3cc-46ec-b49a-d67e3624a14b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1312490043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cb14d09e6f4d84996e4470f4e24eeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c29db1-04", "ovs_interfaceid": "c8c29db1-0485-474e-a7b5-3cfd5822b640", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2146.148048] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:08:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ef746c57-cd18-4883-a0e9-c52937aaf41d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8c29db1-0485-474e-a7b5-3cfd5822b640', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2146.155619] env[62507]: DEBUG oslo.service.loopingcall [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2146.156057] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2146.156286] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-84598fe8-110c-44fb-b902-a74c2d9512fe {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.176190] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2146.176190] env[62507]: value = "task-2460129" [ 2146.176190] env[62507]: _type = "Task" [ 2146.176190] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.183396] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460129, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.686082] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460129, 'name': CreateVM_Task, 'duration_secs': 0.288443} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.686259] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2146.686933] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2146.687121] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2146.687432] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2146.687673] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70bf2fe8-c841-45d5-bd43-7c7562987080 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.691764] env[62507]: DEBUG oslo_vmware.api [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for the task: (returnval){ [ 2146.691764] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]520093ba-0269-e261-e7e4-5ee2d815cc3e" [ 2146.691764] env[62507]: _type = "Task" [ 2146.691764] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.699933] env[62507]: DEBUG oslo_vmware.api [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]520093ba-0269-e261-e7e4-5ee2d815cc3e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.202938] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2147.203287] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2147.203423] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c247624f-78ca-4e86-bd49-38401a0a992a tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2147.878734] env[62507]: DEBUG nova.compute.manager [req-2020623c-4863-4ca0-8017-492dbfbc4f80 req-df87451b-de68-490b-b5f5-157d5bbe3d80 service nova] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Received event network-changed-c8c29db1-0485-474e-a7b5-3cfd5822b640 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2147.878967] env[62507]: DEBUG nova.compute.manager [req-2020623c-4863-4ca0-8017-492dbfbc4f80 req-df87451b-de68-490b-b5f5-157d5bbe3d80 service nova] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Refreshing instance network info cache due to event network-changed-c8c29db1-0485-474e-a7b5-3cfd5822b640. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2147.879162] env[62507]: DEBUG oslo_concurrency.lockutils [req-2020623c-4863-4ca0-8017-492dbfbc4f80 req-df87451b-de68-490b-b5f5-157d5bbe3d80 service nova] Acquiring lock "refresh_cache-33af3273-6d4b-435d-8c40-cdbac591a84f" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2147.879308] env[62507]: DEBUG oslo_concurrency.lockutils [req-2020623c-4863-4ca0-8017-492dbfbc4f80 req-df87451b-de68-490b-b5f5-157d5bbe3d80 service nova] Acquired lock "refresh_cache-33af3273-6d4b-435d-8c40-cdbac591a84f" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2147.879473] env[62507]: DEBUG nova.network.neutron [req-2020623c-4863-4ca0-8017-492dbfbc4f80 req-df87451b-de68-490b-b5f5-157d5bbe3d80 service nova] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Refreshing network info cache for port c8c29db1-0485-474e-a7b5-3cfd5822b640 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2148.329293] env[62507]: DEBUG nova.network.neutron [req-2020623c-4863-4ca0-8017-492dbfbc4f80 req-df87451b-de68-490b-b5f5-157d5bbe3d80 service nova] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Updated VIF entry in instance network info cache for port c8c29db1-0485-474e-a7b5-3cfd5822b640. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2148.329636] env[62507]: DEBUG nova.network.neutron [req-2020623c-4863-4ca0-8017-492dbfbc4f80 req-df87451b-de68-490b-b5f5-157d5bbe3d80 service nova] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Updating instance_info_cache with network_info: [{"id": "c8c29db1-0485-474e-a7b5-3cfd5822b640", "address": "fa:16:3e:60:08:eb", "network": {"id": "0b8e342b-a3cc-46ec-b49a-d67e3624a14b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1312490043-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cb14d09e6f4d84996e4470f4e24eeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ef746c57-cd18-4883-a0e9-c52937aaf41d", "external-id": "nsx-vlan-transportzone-863", "segmentation_id": 863, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c29db1-04", "ovs_interfaceid": "c8c29db1-0485-474e-a7b5-3cfd5822b640", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2148.340050] env[62507]: DEBUG oslo_concurrency.lockutils [req-2020623c-4863-4ca0-8017-492dbfbc4f80 req-df87451b-de68-490b-b5f5-157d5bbe3d80 service nova] Releasing lock "refresh_cache-33af3273-6d4b-435d-8c40-cdbac591a84f" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2155.176135] env[62507]: DEBUG oslo_concurrency.lockutils [None req-4369f799-1331-47e1-95c4-75d3462d64cc tempest-ServerShowV257Test-112853034 tempest-ServerShowV257Test-112853034-project-member] Acquiring lock "479a1e78-23c0-4a96-aa72-aa419c8c251b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.210186] env[62507]: WARNING oslo_vmware.rw_handles [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2166.210186] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2166.210186] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2166.210186] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2166.210186] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2166.210186] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 2166.210186] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2166.210186] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2166.210186] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py",
line 287, in _read_status [ 2166.210186] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2166.210186] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2166.210186] env[62507]: ERROR oslo_vmware.rw_handles [ 2166.210783] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/57a7d2ff-c0f0-44a2-804e-beb78a32108f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2166.212503] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2166.212743] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Copying Virtual Disk [datastore2] vmware_temp/57a7d2ff-c0f0-44a2-804e-beb78a32108f/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/57a7d2ff-c0f0-44a2-804e-beb78a32108f/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2166.213061] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-967ce6ae-8fa0-4692-a7d6-9cdbe8803e9a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.221390] env[62507]: DEBUG oslo_vmware.api [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Waiting for the task: (returnval){ [ 2166.221390] env[62507]: value = "task-2460130" [ 2166.221390] env[62507]: _type = "Task" [ 2166.221390] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.228898] env[62507]: DEBUG oslo_vmware.api [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Task: {'id': task-2460130, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.731487] env[62507]: DEBUG oslo_vmware.exceptions [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2166.731757] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2166.732314] env[62507]: ERROR nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2166.732314] env[62507]: Faults: ['InvalidArgument'] [ 2166.732314] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Traceback (most recent call last): [ 2166.732314] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2166.732314] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] yield resources [ 2166.732314] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2166.732314] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] self.driver.spawn(context, instance, image_meta, [ 2166.732314] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2166.732314] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2166.732314] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2166.732314] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] self._fetch_image_if_missing(context, vi) [ 2166.732314] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2166.732679] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] image_cache(vi, tmp_image_ds_loc) [ 2166.732679] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2166.732679] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] vm_util.copy_virtual_disk( [ 2166.732679] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2166.732679] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] session._wait_for_task(vmdk_copy_task) [ 2166.732679] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 2166.732679] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] return self.wait_for_task(task_ref) [ 2166.732679] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2166.732679] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] return evt.wait() [ 2166.732679] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2166.732679] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] result = hub.switch() [ 2166.732679] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2166.732679] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] return self.greenlet.switch() [ 2166.733097] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2166.733097] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] self.f(*self.args, **self.kw) [ 2166.733097] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2166.733097] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] raise exceptions.translate_fault(task_info.error) [ 2166.733097] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2166.733097] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Faults: ['InvalidArgument'] [ 2166.733097] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] [ 2166.733097] env[62507]: INFO nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Terminating instance [ 2166.735193] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2166.735453] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2166.736127] env[62507]: DEBUG nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Start 
destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2166.736432] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2166.736682] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ae77f44-2b67-4f15-8da7-8f737c4414c8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.739072] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459564a3-5068-4df1-97c3-089bd5d7d01b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.745761] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2166.745990] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0471289-3541-463d-9a41-4a7ba4ef4f2c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.748055] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2166.748237] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2166.749143] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d25e916d-f1fe-44a7-b0c4-1270695fb600 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.753755] env[62507]: DEBUG oslo_vmware.api [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for the task: (returnval){ [ 2166.753755] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52372d56-ebf9-f326-3b96-1049f42cf48b" [ 2166.753755] env[62507]: _type = "Task" [ 2166.753755] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.760458] env[62507]: DEBUG oslo_vmware.api [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52372d56-ebf9-f326-3b96-1049f42cf48b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.819250] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2166.819476] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2166.819664] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Deleting the datastore file [datastore2] 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2166.819936] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec2ed36f-a375-436f-b7d9-519681481646 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.825583] env[62507]: DEBUG oslo_vmware.api [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Waiting for the task: (returnval){ [ 2166.825583] env[62507]: value = "task-2460132" [ 2166.825583] env[62507]: _type = "Task" [ 2166.825583] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.834938] env[62507]: DEBUG oslo_vmware.api [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Task: {'id': task-2460132, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.263996] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2167.264323] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Creating directory with path [datastore2] vmware_temp/2ca74b16-ff2e-4b0f-b9e4-2fdbfb4b2972/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2167.264498] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1810033b-717c-49ba-9a40-4db5700bbd83 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.276127] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Created directory with path [datastore2] vmware_temp/2ca74b16-ff2e-4b0f-b9e4-2fdbfb4b2972/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2167.276371] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Fetch image to [datastore2] vmware_temp/2ca74b16-ff2e-4b0f-b9e4-2fdbfb4b2972/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2167.276545] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/2ca74b16-ff2e-4b0f-b9e4-2fdbfb4b2972/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2167.277286] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945c26f9-5f45-46b5-b49f-678a6bc8a9aa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.283624] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbb0641-b5ea-4ba9-bd24-9755e5b30c74 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.292296] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988af342-5028-4563-9874-ebd23a7580b1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.321534] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cfc7d8-a605-42dc-993e-1161cf096cea {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.329353] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-83beddbd-aa4e-458e-ade0-748df4c3f186 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.335095] env[62507]: DEBUG oslo_vmware.api [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Task: {'id': task-2460132, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070119} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.335320] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2167.335499] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2167.335668] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2167.335838] env[62507]: INFO nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2167.337893] env[62507]: DEBUG nova.compute.claims [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2167.338077] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2167.338289] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2167.348492] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2167.426532] env[62507]: DEBUG oslo_vmware.rw_handles [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2ca74b16-ff2e-4b0f-b9e4-2fdbfb4b2972/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2167.488382] env[62507]: DEBUG oslo_vmware.rw_handles [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2167.488577] env[62507]: DEBUG oslo_vmware.rw_handles [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2ca74b16-ff2e-4b0f-b9e4-2fdbfb4b2972/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2167.563488] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3777b423-8ae8-4127-b740-01b36c97fc6e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.571117] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e325e1c-ca39-444a-a299-01d9b1900d41 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.601018] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173d78a5-653d-47b8-ae00-c16793eab814 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.608119] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c75450-60dc-451e-ba3b-ea65f0975e7b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.620671] env[62507]: DEBUG nova.compute.provider_tree [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2167.630792] env[62507]: DEBUG nova.scheduler.client.report [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2167.644663] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.306s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.645174] env[62507]: ERROR nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2167.645174] env[62507]: Faults: ['InvalidArgument'] [ 2167.645174] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Traceback (most recent call last): [ 2167.645174] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2167.645174] env[62507]: ERROR nova.compute.manager 
[instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] self.driver.spawn(context, instance, image_meta, [ 2167.645174] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2167.645174] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2167.645174] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2167.645174] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] self._fetch_image_if_missing(context, vi) [ 2167.645174] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2167.645174] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] image_cache(vi, tmp_image_ds_loc) [ 2167.645174] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2167.645577] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] vm_util.copy_virtual_disk( [ 2167.645577] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2167.645577] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] session._wait_for_task(vmdk_copy_task) [ 2167.645577] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2167.645577] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] return self.wait_for_task(task_ref) [ 2167.645577] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2167.645577] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] return evt.wait() [ 2167.645577] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2167.645577] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] result = hub.switch() [ 2167.645577] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2167.645577] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] return self.greenlet.switch() [ 2167.645577] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2167.645577] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] self.f(*self.args, **self.kw) [ 2167.645919] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2167.645919] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] raise exceptions.translate_fault(task_info.error) [ 2167.645919] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2167.645919] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Faults: ['InvalidArgument'] [ 2167.645919] env[62507]: ERROR nova.compute.manager [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] [ 2167.645919] env[62507]: DEBUG nova.compute.utils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2167.647244] env[62507]: DEBUG nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Build of instance 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 was re-scheduled: A specified parameter was not correct: fileType [ 2167.647244] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2167.647604] env[62507]: DEBUG nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2167.647777] env[62507]: DEBUG nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2167.647947] env[62507]: DEBUG nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2167.648141] env[62507]: DEBUG nova.network.neutron [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2167.945245] env[62507]: DEBUG nova.network.neutron [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2167.955935] env[62507]: INFO nova.compute.manager [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Took 0.31 seconds to deallocate network for instance. [ 2168.049446] env[62507]: INFO nova.scheduler.client.report [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Deleted allocations for instance 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 [ 2168.075161] env[62507]: DEBUG oslo_concurrency.lockutils [None req-cd6f9290-cabe-462e-86d5-9153be3ac1dd tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 594.812s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.075563] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 431.713s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.075670] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2168.075858] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.077347] env[62507]: DEBUG oslo_concurrency.lockutils [None req-124c17e1-58a3-4c3a-aca1-dadf761c08f2 tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 398.690s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.077347] env[62507]: DEBUG oslo_concurrency.lockutils [None req-124c17e1-58a3-4c3a-aca1-dadf761c08f2 tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Acquiring lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2168.077347] env[62507]: DEBUG oslo_concurrency.lockutils [None req-124c17e1-58a3-4c3a-aca1-dadf761c08f2 tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.077347] env[62507]: DEBUG oslo_concurrency.lockutils [None req-124c17e1-58a3-4c3a-aca1-dadf761c08f2 tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.081161] env[62507]: INFO nova.compute.manager [None req-124c17e1-58a3-4c3a-aca1-dadf761c08f2 tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Terminating instance [ 2168.084809] env[62507]: DEBUG nova.compute.manager [None req-124c17e1-58a3-4c3a-aca1-dadf761c08f2 tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Start destroying the instance on the hypervisor. 
{{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2168.084809] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-124c17e1-58a3-4c3a-aca1-dadf761c08f2 tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2168.084809] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5373e846-87bf-4462-9cd2-b3ea289229fa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.094021] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95035cf-297c-47ff-86d7-1a0a870ec02d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.123031] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-124c17e1-58a3-4c3a-aca1-dadf761c08f2 tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879 could not be found. [ 2168.123264] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-124c17e1-58a3-4c3a-aca1-dadf761c08f2 tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2168.123465] env[62507]: INFO nova.compute.manager [None req-124c17e1-58a3-4c3a-aca1-dadf761c08f2 tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2168.123685] env[62507]: DEBUG oslo.service.loopingcall [None req-124c17e1-58a3-4c3a-aca1-dadf761c08f2 tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2168.123926] env[62507]: DEBUG nova.compute.manager [-] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2168.124035] env[62507]: DEBUG nova.network.neutron [-] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2168.146130] env[62507]: DEBUG nova.network.neutron [-] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2168.154247] env[62507]: INFO nova.compute.manager [-] [instance: 4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879] Took 0.03 seconds to deallocate network for instance. 
[ 2168.239190] env[62507]: DEBUG oslo_concurrency.lockutils [None req-124c17e1-58a3-4c3a-aca1-dadf761c08f2 tempest-ServerTagsTestJSON-1812105574 tempest-ServerTagsTestJSON-1812105574-project-member] Lock "4fb7ae8d-1ed6-4fb4-a5d4-8ed0fe39b879" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.163s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2176.220042] env[62507]: DEBUG oslo_concurrency.lockutils [None req-a5e6412a-c192-453a-a514-56d5cd727a5f tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "7adef000-4700-4c2f-a7ea-09baf40cedf5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2177.765704] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Acquiring lock "d27a54c7-96d0-467c-8cb2-8b23de43c107" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2177.765970] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Lock "d27a54c7-96d0-467c-8cb2-8b23de43c107" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2177.776928] env[62507]: DEBUG nova.compute.manager [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2177.826924] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2177.827207] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2177.828803] env[62507]: INFO nova.compute.claims [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2177.989689] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a8cde7-2f8a-49e8-b84d-d8f7dc635d77 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.997051] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c99d59-b6ee-4450-8a17-7251515a4aae {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.027984] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a338d8-69a1-49da-9877-d1be8bc6fa23 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.034523] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4a8611-175a-4077-a85f-a9fe57a7f0a2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.046916] env[62507]: DEBUG nova.compute.provider_tree [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2178.055567] env[62507]: DEBUG nova.scheduler.client.report [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2178.069352] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 
tempest-ServersTestJSON-774017569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.242s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2178.069835] env[62507]: DEBUG nova.compute.manager [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2178.100740] env[62507]: DEBUG nova.compute.utils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2178.102574] env[62507]: DEBUG nova.compute.manager [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2178.102676] env[62507]: DEBUG nova.network.neutron [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2178.111798] env[62507]: DEBUG nova.compute.manager [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2178.163223] env[62507]: DEBUG nova.policy [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b27b08bfbedd4b5a87020fb0b8b34f10', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ad78653e62e4d8896469b0323a910b4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 2178.175595] env[62507]: DEBUG nova.compute.manager [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2178.200350] env[62507]: DEBUG nova.virt.hardware [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2178.200600] env[62507]: DEBUG nova.virt.hardware [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2178.200760] env[62507]: DEBUG nova.virt.hardware [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2178.200943] env[62507]: DEBUG nova.virt.hardware [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2178.201108] env[62507]: DEBUG nova.virt.hardware [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2178.201274] env[62507]: DEBUG nova.virt.hardware [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2178.201485] env[62507]: DEBUG nova.virt.hardware [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2178.201646] env[62507]: DEBUG nova.virt.hardware [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2178.201815] env[62507]: DEBUG nova.virt.hardware [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 
tempest-ServersTestJSON-774017569-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2178.201977] env[62507]: DEBUG nova.virt.hardware [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2178.202174] env[62507]: DEBUG nova.virt.hardware [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2178.203011] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5645c61-9025-48b1-8722-fd8458125afc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.210938] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb001bd-ae18-45f4-a3d0-ce0784f168e4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.574872] env[62507]: DEBUG nova.network.neutron [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Successfully created port: aa940d61-b2a1-4575-b58a-9c6161f03ff7 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2179.189870] env[62507]: DEBUG nova.compute.manager [req-1f638b6f-87b7-4826-bfe3-9c914e1bde8e req-d96ec266-aece-4437-a47f-b33402075065 service nova] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Received event network-vif-plugged-aa940d61-b2a1-4575-b58a-9c6161f03ff7 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2179.190106] env[62507]: DEBUG oslo_concurrency.lockutils [req-1f638b6f-87b7-4826-bfe3-9c914e1bde8e req-d96ec266-aece-4437-a47f-b33402075065 service nova] Acquiring lock "d27a54c7-96d0-467c-8cb2-8b23de43c107-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.190326] env[62507]: DEBUG oslo_concurrency.lockutils [req-1f638b6f-87b7-4826-bfe3-9c914e1bde8e req-d96ec266-aece-4437-a47f-b33402075065 service nova] Lock "d27a54c7-96d0-467c-8cb2-8b23de43c107-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.190500] env[62507]: DEBUG oslo_concurrency.lockutils [req-1f638b6f-87b7-4826-bfe3-9c914e1bde8e req-d96ec266-aece-4437-a47f-b33402075065 service nova] Lock "d27a54c7-96d0-467c-8cb2-8b23de43c107-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.190668] env[62507]: DEBUG nova.compute.manager [req-1f638b6f-87b7-4826-bfe3-9c914e1bde8e req-d96ec266-aece-4437-a47f-b33402075065 service nova] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] No 
waiting events found dispatching network-vif-plugged-aa940d61-b2a1-4575-b58a-9c6161f03ff7 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2179.190831] env[62507]: WARNING nova.compute.manager [req-1f638b6f-87b7-4826-bfe3-9c914e1bde8e req-d96ec266-aece-4437-a47f-b33402075065 service nova] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Received unexpected event network-vif-plugged-aa940d61-b2a1-4575-b58a-9c6161f03ff7 for instance with vm_state building and task_state spawning. [ 2179.266548] env[62507]: DEBUG nova.network.neutron [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Successfully updated port: aa940d61-b2a1-4575-b58a-9c6161f03ff7 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2179.288851] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Acquiring lock "refresh_cache-d27a54c7-96d0-467c-8cb2-8b23de43c107" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2179.289110] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Acquired lock "refresh_cache-d27a54c7-96d0-467c-8cb2-8b23de43c107" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2179.289287] env[62507]: DEBUG nova.network.neutron [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2179.330670] env[62507]: DEBUG nova.network.neutron [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2179.499362] env[62507]: DEBUG nova.network.neutron [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Updating instance_info_cache with network_info: [{"id": "aa940d61-b2a1-4575-b58a-9c6161f03ff7", "address": "fa:16:3e:1a:ba:16", "network": {"id": "f166f8df-456b-443b-ab9f-09855eebf2f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1805181695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ad78653e62e4d8896469b0323a910b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afb671bc-328c-40bf-9c2a-d98695e3d60c", "external-id": "nsx-vlan-transportzone-920", "segmentation_id": 920, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa940d61-b2", "ovs_interfaceid": "aa940d61-b2a1-4575-b58a-9c6161f03ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2179.510575] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Releasing lock "refresh_cache-d27a54c7-96d0-467c-8cb2-8b23de43c107" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2179.510864] env[62507]: DEBUG nova.compute.manager [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Instance network_info: |[{"id": "aa940d61-b2a1-4575-b58a-9c6161f03ff7", "address": "fa:16:3e:1a:ba:16", "network": {"id": "f166f8df-456b-443b-ab9f-09855eebf2f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1805181695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ad78653e62e4d8896469b0323a910b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afb671bc-328c-40bf-9c2a-d98695e3d60c", "external-id": "nsx-vlan-transportzone-920", "segmentation_id": 920, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa940d61-b2", "ovs_interfaceid": "aa940d61-b2a1-4575-b58a-9c6161f03ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2179.511287] env[62507]: DEBUG 
nova.virt.vmwareapi.vmops [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:ba:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afb671bc-328c-40bf-9c2a-d98695e3d60c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa940d61-b2a1-4575-b58a-9c6161f03ff7', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2179.520157] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Creating folder: Project (1ad78653e62e4d8896469b0323a910b4). Parent ref: group-v497991. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2179.520701] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3506fce8-0490-423f-96e1-639d18e70ee7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.533149] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Created folder: Project (1ad78653e62e4d8896469b0323a910b4) in parent group-v497991. [ 2179.533409] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Creating folder: Instances. Parent ref: group-v498106. {{(pid=62507) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2179.533646] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8836162-c9b9-4345-a7a4-2e30edafb9ee {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.541984] env[62507]: INFO nova.virt.vmwareapi.vm_util [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Created folder: Instances in parent group-v498106. [ 2179.542241] env[62507]: DEBUG oslo.service.loopingcall [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2179.542458] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2179.542664] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39bf0760-3a44-4022-b16e-baecb2d45700 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.561649] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2179.561649] env[62507]: value = "task-2460135" [ 2179.561649] env[62507]: _type = "Task" [ 2179.561649] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.571171] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460135, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.071524] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460135, 'name': CreateVM_Task, 'duration_secs': 0.305687} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.071692] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2180.072431] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2180.072595] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2180.072908] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2180.073287] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b83bdc5b-9ea5-41bd-8311-5e5438c1d7cf {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.077602] env[62507]: DEBUG oslo_vmware.api [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Waiting for the task: (returnval){ [ 2180.077602] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52115dbd-ec9b-9409-e76d-e4c3c76313a8" [ 2180.077602] env[62507]: _type = "Task" [ 2180.077602] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2180.084844] env[62507]: DEBUG oslo_vmware.api [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52115dbd-ec9b-9409-e76d-e4c3c76313a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.588248] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2180.588575] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2180.588763] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8dc72be0-6903-4ccc-ac22-64f1d176da8e tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2181.217380] env[62507]: DEBUG nova.compute.manager [req-f15432af-f1fe-40fb-9ad1-4236ed3bcd48 req-001035fa-613d-4ea7-97ca-12a99b3f3270 service nova] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Received event network-changed-aa940d61-b2a1-4575-b58a-9c6161f03ff7 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2181.217580] env[62507]: DEBUG nova.compute.manager [req-f15432af-f1fe-40fb-9ad1-4236ed3bcd48 req-001035fa-613d-4ea7-97ca-12a99b3f3270 service nova] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Refreshing instance network info cache due to event network-changed-aa940d61-b2a1-4575-b58a-9c6161f03ff7. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2181.217791] env[62507]: DEBUG oslo_concurrency.lockutils [req-f15432af-f1fe-40fb-9ad1-4236ed3bcd48 req-001035fa-613d-4ea7-97ca-12a99b3f3270 service nova] Acquiring lock "refresh_cache-d27a54c7-96d0-467c-8cb2-8b23de43c107" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2181.217936] env[62507]: DEBUG oslo_concurrency.lockutils [req-f15432af-f1fe-40fb-9ad1-4236ed3bcd48 req-001035fa-613d-4ea7-97ca-12a99b3f3270 service nova] Acquired lock "refresh_cache-d27a54c7-96d0-467c-8cb2-8b23de43c107" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2181.218112] env[62507]: DEBUG nova.network.neutron [req-f15432af-f1fe-40fb-9ad1-4236ed3bcd48 req-001035fa-613d-4ea7-97ca-12a99b3f3270 service nova] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Refreshing network info cache for port aa940d61-b2a1-4575-b58a-9c6161f03ff7 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2181.533714] env[62507]: DEBUG nova.network.neutron [req-f15432af-f1fe-40fb-9ad1-4236ed3bcd48 req-001035fa-613d-4ea7-97ca-12a99b3f3270 service nova] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Updated VIF entry in instance network info cache for port aa940d61-b2a1-4575-b58a-9c6161f03ff7. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2181.534075] env[62507]: DEBUG nova.network.neutron [req-f15432af-f1fe-40fb-9ad1-4236ed3bcd48 req-001035fa-613d-4ea7-97ca-12a99b3f3270 service nova] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Updating instance_info_cache with network_info: [{"id": "aa940d61-b2a1-4575-b58a-9c6161f03ff7", "address": "fa:16:3e:1a:ba:16", "network": {"id": "f166f8df-456b-443b-ab9f-09855eebf2f2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1805181695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ad78653e62e4d8896469b0323a910b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afb671bc-328c-40bf-9c2a-d98695e3d60c", "external-id": "nsx-vlan-transportzone-920", "segmentation_id": 920, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa940d61-b2", "ovs_interfaceid": "aa940d61-b2a1-4575-b58a-9c6161f03ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2181.543078] env[62507]: DEBUG oslo_concurrency.lockutils [req-f15432af-f1fe-40fb-9ad1-4236ed3bcd48 req-001035fa-613d-4ea7-97ca-12a99b3f3270 service nova] Releasing lock "refresh_cache-d27a54c7-96d0-467c-8cb2-8b23de43c107" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2200.557325] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.168622] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.168896] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2202.168896] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2202.190264] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2202.190410] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2202.190544] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2202.190676] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a76d0987-29c2-423b-972c-990639986d5f] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2202.190799] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2202.190922] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2202.191179] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2202.191344] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2202.191472] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2202.191596] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2202.191764] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2202.192279] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.192426] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2203.186749] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.167465] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.167825] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.167865] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.167780] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.168055] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.179937] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2206.180166] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2206.180334] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2206.180489] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2206.181566] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3973df06-a110-4f99-ad59-363a40768185 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.190235] env[62507]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deeb4977-065f-4159-9eee-3836ab9fb9bb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.203758] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4e3e60-94ae-4896-9ad3-463d52897237 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.209580] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6279cf-5582-45a0-8995-900e04de1983 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.246242] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181150MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2206.246400] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2206.246590] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2206.315669] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 1c56d4af-ba43-4141-86d6-880ff384041e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2206.315837] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 39471434-14af-468a-8b55-5fc58957e7b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2206.315966] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 16295fdd-45d6-492f-99d9-1006ec42c097 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2206.316102] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a76d0987-29c2-423b-972c-990639986d5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2206.316224] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 56c176ec-c6e5-4f48-a5be-badef25c5667 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2206.316341] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 425b5171-97c2-4700-ad5f-c79aadb39eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2206.316457] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 479a1e78-23c0-4a96-aa72-aa419c8c251b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2206.316579] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7adef000-4700-4c2f-a7ea-09baf40cedf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2206.316692] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 33af3273-6d4b-435d-8c40-cdbac591a84f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2206.316806] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d27a54c7-96d0-467c-8cb2-8b23de43c107 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2206.316983] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2206.317131] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2206.432354] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382aad61-9dad-4a16-9b05-fb611b41a901 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.439408] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4fbaf72-4728-40e4-ad8b-66ecfcf13485 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.469796] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e009d2-7e4c-4923-9132-4cdc3947addd {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.476965] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c97865-21ae-4b5f-8a4d-8645319c8cf2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.490924] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2206.500223] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2206.513182] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2206.513381] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.267s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2207.508603] env[62507]: DEBUG oslo_service.periodic_task [None 
req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2217.337788] env[62507]: WARNING oslo_vmware.rw_handles [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2217.337788] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2217.337788] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2217.337788] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2217.337788] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2217.337788] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 2217.337788] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2217.337788] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2217.337788] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2217.337788] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2217.337788] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2217.337788] env[62507]: ERROR oslo_vmware.rw_handles [ 2217.338408] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/2ca74b16-ff2e-4b0f-b9e4-2fdbfb4b2972/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2217.340474] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2217.340724] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Copying Virtual Disk [datastore2] vmware_temp/2ca74b16-ff2e-4b0f-b9e4-2fdbfb4b2972/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/2ca74b16-ff2e-4b0f-b9e4-2fdbfb4b2972/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2217.341026] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bde4fc88-2077-4e64-be77-81765e3a6695 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.348944] env[62507]: DEBUG oslo_vmware.api [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 
tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for the task: (returnval){ [ 2217.348944] env[62507]: value = "task-2460136" [ 2217.348944] env[62507]: _type = "Task" [ 2217.348944] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.356535] env[62507]: DEBUG oslo_vmware.api [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Task: {'id': task-2460136, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.861026] env[62507]: DEBUG oslo_vmware.exceptions [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2217.861026] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2217.861026] env[62507]: ERROR nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2217.861026] env[62507]: Faults: ['InvalidArgument'] [ 2217.861026] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Traceback (most recent call last): [ 2217.861026] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2217.861026] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] yield resources [ 2217.861026] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2217.861026] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] self.driver.spawn(context, instance, image_meta, [ 2217.861460] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2217.861460] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2217.861460] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2217.861460] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] self._fetch_image_if_missing(context, vi) [ 2217.861460] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2217.861460] env[62507]: ERROR 
nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] image_cache(vi, tmp_image_ds_loc) [ 2217.861460] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2217.861460] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] vm_util.copy_virtual_disk( [ 2217.861460] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2217.861460] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] session._wait_for_task(vmdk_copy_task) [ 2217.861460] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2217.861460] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] return self.wait_for_task(task_ref) [ 2217.861460] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2217.861777] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] return evt.wait() [ 2217.861777] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2217.861777] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] result = hub.switch() [ 2217.861777] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2217.861777] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] return self.greenlet.switch() [ 2217.861777] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2217.861777] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] self.f(*self.args, **self.kw) [ 2217.861777] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2217.861777] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] raise exceptions.translate_fault(task_info.error) [ 2217.861777] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2217.861777] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Faults: ['InvalidArgument'] [ 2217.861777] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] [ 2217.862079] env[62507]: INFO nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Terminating instance [ 2217.862079] env[62507]: DEBUG oslo_concurrency.lockutils [None 
req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2217.863114] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2217.863744] env[62507]: DEBUG nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2217.863958] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2217.864206] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0228a650-8028-4d9c-834a-b05f4ee4d066 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.866600] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3479176e-8dec-4178-ad68-62e877e2db03 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.873190] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2217.873395] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbb71047-8ae4-43cd-8b7e-2ddcc5b8e7ff {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.875598] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2217.875798] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2217.876736] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e26079c-4941-456a-a13b-c43ec679c522 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.881275] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for the task: (returnval){ [ 2217.881275] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52b7e078-7d55-4dda-3ef5-ba875e31c714" [ 2217.881275] env[62507]: _type = "Task" [ 2217.881275] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.888063] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52b7e078-7d55-4dda-3ef5-ba875e31c714, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.939697] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2217.939942] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2217.940102] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Deleting the datastore file [datastore2] 1c56d4af-ba43-4141-86d6-880ff384041e {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2217.940366] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a984022-d11d-4851-a2a8-77756681106e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.946862] env[62507]: DEBUG oslo_vmware.api [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for the task: (returnval){ [ 2217.946862] env[62507]: value = "task-2460138" [ 2217.946862] env[62507]: _type = "Task" [ 2217.946862] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.953893] env[62507]: DEBUG oslo_vmware.api [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Task: {'id': task-2460138, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.392938] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2218.393306] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Creating directory with path [datastore2] vmware_temp/76658e3a-5128-454a-a1ee-da57a1b55979/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2218.393388] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ca11ada-b53f-4ba7-9a75-09a5c4ce8109 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.404015] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Created directory with path [datastore2] vmware_temp/76658e3a-5128-454a-a1ee-da57a1b55979/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2218.404219] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Fetch image to [datastore2] vmware_temp/76658e3a-5128-454a-a1ee-da57a1b55979/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2218.404393] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/76658e3a-5128-454a-a1ee-da57a1b55979/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2218.405098] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67a17d7-fefd-4461-bd1e-b0e9a38a09e8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.411409] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367975eb-82a3-4329-ac50-8b683f1558aa {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.419999] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5ed6f4-cf71-4176-8996-ea92405883c8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.451824] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5d025abf-e077-47ce-ba7d-c0f12669dd6f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.459923] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4e7482e5-fee4-4db7-88a2-4b7a3bca7c2d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.461519] env[62507]: DEBUG oslo_vmware.api [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Task: {'id': task-2460138, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075321} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.461749] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2218.461945] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2218.462167] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2218.462347] env[62507]: INFO nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Took 0.60 seconds to destroy the instance on the hypervisor. 
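
The CreateVM_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task records above all follow the same contract: the SOAP call returns a task handle immediately, and the driver then polls it ("progress is 0%." ... "completed successfully" with a duration_secs) until the server reports a terminal state, translating a server-side fault such as InvalidArgument into an exception. The sketch below illustrates that poll loop in plain Python; it is not the oslo.vmware implementation, and the TaskInfo shape, state names, and poll interval are assumptions made for the example.

import time

# Illustrative stand-ins for the task states seen in the log
# ("progress is 0%." ... "completed successfully"). Not oslo.vmware code.
RUNNING, SUCCESS, ERROR = "running", "success", "error"

class TaskInfo:
    """Assumed minimal shape of the polled task status."""
    def __init__(self, task_id, name):
        self.task_id = task_id        # e.g. "task-2460138"
        self.name = name              # e.g. "DeleteDatastoreFile_Task"
        self.state = RUNNING
        self.progress = 0
        self.error = None

def wait_for_task(fetch_info, task_id, interval=0.5, timeout=300.0):
    """Poll a task until it succeeds, raising on error or timeout.

    fetch_info(task_id) -> TaskInfo is assumed to wrap the actual
    status lookup (PropertyCollector reads in the real driver).
    """
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_info(task_id)
        if info.state == SUCCESS:
            return info
        if info.state == ERROR:
            # Mirrors how the trace above surfaces the server fault
            # (e.g. "A specified parameter was not correct: fileType").
            raise RuntimeError(f"{info.name} failed: {info.error}")
        if time.monotonic() > deadline:
            raise TimeoutError(f"{info.name} ({task_id}) did not complete")
        # The real loop logs "Task: {...} progress is N%." on each pass.
        time.sleep(interval)
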
[ 2218.464724] env[62507]: DEBUG nova.compute.claims [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2218.464900] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2218.465131] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2218.482197] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2218.537646] env[62507]: DEBUG oslo_vmware.rw_handles [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76658e3a-5128-454a-a1ee-da57a1b55979/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2218.599079] env[62507]: DEBUG oslo_vmware.rw_handles [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2218.599291] env[62507]: DEBUG oslo_vmware.rw_handles [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76658e3a-5128-454a-a1ee-da57a1b55979/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2218.685119] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e209b7c8-85c9-4037-a837-724495e0ab97 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2218.692668] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6795e123-a81e-4b9e-aeaa-f60685ff4b62 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2218.722957] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2381e1-eef9-420e-af6e-0ee9550c85f5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2218.730185] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9bdbde-5811-43c9-af3f-c9350b214a0e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2218.742706] env[62507]: DEBUG nova.compute.provider_tree [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2218.751214] env[62507]: DEBUG nova.scheduler.client.report [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2218.763794] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.299s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2218.764317] env[62507]: ERROR nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2218.764317] env[62507]: Faults: ['InvalidArgument']
[ 2218.764317] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Traceback (most recent call last):
[ 2218.764317] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2218.764317] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] self.driver.spawn(context, instance, image_meta,
[ 2218.764317] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2218.764317] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2218.764317] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2218.764317] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] self._fetch_image_if_missing(context, vi)
[ 2218.764317] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2218.764317] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] image_cache(vi, tmp_image_ds_loc)
[ 2218.764317] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2218.764662] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] vm_util.copy_virtual_disk(
[ 2218.764662] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2218.764662] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] session._wait_for_task(vmdk_copy_task)
[ 2218.764662] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2218.764662] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] return self.wait_for_task(task_ref)
[ 2218.764662] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2218.764662] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] return evt.wait()
[ 2218.764662] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2218.764662] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] result = hub.switch()
[ 2218.764662] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2218.764662] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] return self.greenlet.switch()
[ 2218.764662] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2218.764662] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] self.f(*self.args, **self.kw)
[ 2218.765036] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2218.765036] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] raise exceptions.translate_fault(task_info.error)
[ 2218.765036] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2218.765036] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Faults: ['InvalidArgument']
[ 2218.765036] env[62507]: ERROR nova.compute.manager [instance: 1c56d4af-ba43-4141-86d6-880ff384041e]
[ 2218.765036] env[62507]: DEBUG nova.compute.utils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2218.766367] env[62507]: DEBUG nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Build of instance 1c56d4af-ba43-4141-86d6-880ff384041e was re-scheduled: A specified parameter was not correct: fileType
[ 2218.766367] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2218.766738] env[62507]: DEBUG nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2218.766934] env[62507]: DEBUG nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2218.767144] env[62507]: DEBUG nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2218.767316] env[62507]: DEBUG nova.network.neutron [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2219.034247] env[62507]: DEBUG nova.network.neutron [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2219.044017] env[62507]: INFO nova.compute.manager [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Took 0.28 seconds to deallocate network for instance. [ 2219.130743] env[62507]: INFO nova.scheduler.client.report [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Deleted allocations for instance 1c56d4af-ba43-4141-86d6-880ff384041e [ 2219.154043] env[62507]: DEBUG oslo_concurrency.lockutils [None req-735b5c3d-de37-4a2b-b51b-29bb26f99269 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "1c56d4af-ba43-4141-86d6-880ff384041e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.343s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2219.154043] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "1c56d4af-ba43-4141-86d6-880ff384041e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 482.791s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2219.154232] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] During sync_power_state the instance has a pending task (spawning). Skip. 
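The InvalidArgument fault above originates in the CopyVirtualDisk_Task: oslo_vmware's task poller translates the task's error into a VimFaultException, which unwinds through wait_for_task into the compute manager and triggers the re-schedule. A minimal sketch, not Nova's actual handling, of how a caller can surface the fault list; the session object is assumed to be an oslo_vmware.api.VMwareAPISession as in the traceback:

    # Minimal sketch, not Nova's code: surface the vSphere fault names
    # carried by a failed oslo_vmware task.
    from oslo_vmware import exceptions as vexc

    def run_task(session, task_ref):
        # wait_for_task polls the task and raises once it reaches an
        # error state, as seen at api.py:448 in the traceback above.
        try:
            return session.wait_for_task(task_ref)
        except vexc.VimFaultException as exc:
            # exc.fault_list carries fault names such as
            # ['InvalidArgument'], matching the "Faults:" lines above.
            print('task failed: %s (faults: %s)' % (exc, exc.fault_list))
            raise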
[ 2219.154319] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "1c56d4af-ba43-4141-86d6-880ff384041e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2219.154576] env[62507]: DEBUG oslo_concurrency.lockutils [None req-1ee0b396-e24e-4310-b242-5ef775c39ac9 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "1c56d4af-ba43-4141-86d6-880ff384041e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.291s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2219.154865] env[62507]: DEBUG oslo_concurrency.lockutils [None req-1ee0b396-e24e-4310-b242-5ef775c39ac9 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "1c56d4af-ba43-4141-86d6-880ff384041e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2219.155480] env[62507]: DEBUG oslo_concurrency.lockutils [None req-1ee0b396-e24e-4310-b242-5ef775c39ac9 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "1c56d4af-ba43-4141-86d6-880ff384041e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2219.155480] env[62507]: DEBUG oslo_concurrency.lockutils [None req-1ee0b396-e24e-4310-b242-5ef775c39ac9 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "1c56d4af-ba43-4141-86d6-880ff384041e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2219.157594] env[62507]: INFO nova.compute.manager [None req-1ee0b396-e24e-4310-b242-5ef775c39ac9 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Terminating instance [ 2219.160616] env[62507]: DEBUG nova.compute.manager [None req-1ee0b396-e24e-4310-b242-5ef775c39ac9 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Start destroying the instance on the hypervisor. 
{{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2219.160834] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-1ee0b396-e24e-4310-b242-5ef775c39ac9 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2219.161386] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3007ec5a-0d38-48d6-9748-d2c41e1d7181 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.170668] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87662238-aaba-492c-9a28-f00edd47ee8c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.198335] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-1ee0b396-e24e-4310-b242-5ef775c39ac9 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1c56d4af-ba43-4141-86d6-880ff384041e could not be found. [ 2219.198523] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-1ee0b396-e24e-4310-b242-5ef775c39ac9 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2219.198699] env[62507]: INFO nova.compute.manager [None req-1ee0b396-e24e-4310-b242-5ef775c39ac9 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2219.198937] env[62507]: DEBUG oslo.service.loopingcall [None req-1ee0b396-e24e-4310-b242-5ef775c39ac9 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2219.199171] env[62507]: DEBUG nova.compute.manager [-] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2219.199264] env[62507]: DEBUG nova.network.neutron [-] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2219.222247] env[62507]: DEBUG nova.network.neutron [-] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2219.230664] env[62507]: INFO nova.compute.manager [-] [instance: 1c56d4af-ba43-4141-86d6-880ff384041e] Took 0.03 seconds to deallocate network for instance. 
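Each acquired/released pair in this log ('Lock "..." acquired by "..." :: waited N s' followed by '"released" by "..." :: held N s') is emitted by oslo_concurrency's lock helpers, which key a critical section on a plain string name. A minimal sketch of the same pattern; the names here are illustrative, not Nova's:

    # Minimal sketch of the named-lock pattern behind the
    # acquired/waited/held/released DEBUG lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('instance-uuid-events')  # illustrative name
    def clear_events():
        # Runs with the named lock held; lockutils logs the waited
        # and held durations at DEBUG level around this body.
        pass

    # Equivalent context-manager form:
    def terminate():
        with lockutils.lock('instance-uuid'):  # illustrative name
            pass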
[ 2219.322874] env[62507]: DEBUG oslo_concurrency.lockutils [None req-1ee0b396-e24e-4310-b242-5ef775c39ac9 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "1c56d4af-ba43-4141-86d6-880ff384041e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.168s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2261.169053] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2262.167712] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2262.168754] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2262.168754] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2262.189819] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2262.190130] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2262.190130] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a76d0987-29c2-423b-972c-990639986d5f] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2262.190130] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2262.190256] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2262.190383] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2262.190510] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2262.190630] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2262.190748] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2262.190869] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2263.187014] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.167596] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.167845] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2264.226639] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "15b28973-c067-45be-ad64-b9315f033824" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2264.226875] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "15b28973-c067-45be-ad64-b9315f033824" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2264.240882] env[62507]: DEBUG nova.compute.manager [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2264.251477] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "b609718a-b93b-4398-9ffd-1a4dca9f0753" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2264.251728] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "b609718a-b93b-4398-9ffd-1a4dca9f0753" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2264.291614] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2264.291847] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2264.293994] env[62507]: INFO nova.compute.claims [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2264.461063] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f16f60-8c07-4f2c-8f42-67bff47bf177 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.468832] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09a17b9-a01c-426a-9b87-a204e5596ed9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.498958] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8a529b-daf4-423c-8d43-9464df9174a1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.506234] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27eadd18-9133-49e7-926b-d0bf15f72ffe {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.519523] env[62507]: DEBUG nova.compute.provider_tree [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Inventory has not changed in ProviderTree for provider: 
40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2264.528565] env[62507]: DEBUG nova.scheduler.client.report [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2264.543623] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.252s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2264.544109] env[62507]: DEBUG nova.compute.manager [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2264.578563] env[62507]: DEBUG nova.compute.utils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2264.580076] env[62507]: DEBUG nova.compute.manager [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2264.580247] env[62507]: DEBUG nova.network.neutron [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2264.588878] env[62507]: DEBUG nova.compute.manager [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Start building block device mappings for instance. 
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2264.641070] env[62507]: DEBUG nova.policy [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec30c348fdca4b22a99ab020dfb776f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b70942ca8c3d422a8d7740aad1324a6b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 2264.649179] env[62507]: DEBUG nova.compute.manager [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Start spawning the instance on the hypervisor. {{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2264.674337] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2264.674596] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2264.674764] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2264.674946] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2264.675111] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2264.675257] env[62507]: DEBUG 
nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2264.675465] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2264.675629] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2264.675802] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2264.675969] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2264.676161] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2264.677037] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b24403-0216-4fd7-a4f4-3fb0037e1517 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.684826] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37198ae5-4fa1-473f-b501-9d9276d50120 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.006627] env[62507]: DEBUG nova.network.neutron [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Successfully created port: 64b89813-24eb-4145-85dd-24053b31b10c {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2265.168334] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2265.168334] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2265.168334] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2265.528394] env[62507]: DEBUG nova.compute.manager [req-0d6d054b-1a47-437f-9ae2-d6bb1cf37742 req-6495bdff-b17f-4847-be9e-2879815856f1 service nova] [instance: 15b28973-c067-45be-ad64-b9315f033824] Received event network-vif-plugged-64b89813-24eb-4145-85dd-24053b31b10c {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2265.528623] env[62507]: DEBUG oslo_concurrency.lockutils [req-0d6d054b-1a47-437f-9ae2-d6bb1cf37742 req-6495bdff-b17f-4847-be9e-2879815856f1 service nova] Acquiring lock "15b28973-c067-45be-ad64-b9315f033824-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2265.528844] env[62507]: DEBUG oslo_concurrency.lockutils [req-0d6d054b-1a47-437f-9ae2-d6bb1cf37742 req-6495bdff-b17f-4847-be9e-2879815856f1 service nova] Lock "15b28973-c067-45be-ad64-b9315f033824-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2265.529054] env[62507]: DEBUG oslo_concurrency.lockutils [req-0d6d054b-1a47-437f-9ae2-d6bb1cf37742 req-6495bdff-b17f-4847-be9e-2879815856f1 service nova] Lock "15b28973-c067-45be-ad64-b9315f033824-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2265.529265] env[62507]: DEBUG nova.compute.manager [req-0d6d054b-1a47-437f-9ae2-d6bb1cf37742 req-6495bdff-b17f-4847-be9e-2879815856f1 service nova] [instance: 15b28973-c067-45be-ad64-b9315f033824] No waiting events found dispatching network-vif-plugged-64b89813-24eb-4145-85dd-24053b31b10c {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2265.529441] env[62507]: WARNING nova.compute.manager [req-0d6d054b-1a47-437f-9ae2-d6bb1cf37742 req-6495bdff-b17f-4847-be9e-2879815856f1 service nova] [instance: 15b28973-c067-45be-ad64-b9315f033824] Received unexpected event network-vif-plugged-64b89813-24eb-4145-85dd-24053b31b10c for instance with vm_state building and task_state spawning. 
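The inventory records repeated throughout this log fix how much capacity placement can hand out on provider 40e67440-0925-46e5-9b58-6e63187cdfab: for each resource class, consumable capacity is (total - reserved) * allocation_ratio. A quick worked check against the logged figures:

    # Worked check of placement capacity from the inventory logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: %g consumable' % (rc, capacity))
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400 -- which is why the
    # 10 allocated vCPUs reported later still leave ample headroom.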
[ 2265.612259] env[62507]: DEBUG nova.network.neutron [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Successfully updated port: 64b89813-24eb-4145-85dd-24053b31b10c {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2265.624429] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "refresh_cache-15b28973-c067-45be-ad64-b9315f033824" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2265.624429] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired lock "refresh_cache-15b28973-c067-45be-ad64-b9315f033824" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2265.624429] env[62507]: DEBUG nova.network.neutron [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2265.664535] env[62507]: DEBUG nova.network.neutron [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2265.830590] env[62507]: DEBUG nova.network.neutron [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Updating instance_info_cache with network_info: [{"id": "64b89813-24eb-4145-85dd-24053b31b10c", "address": "fa:16:3e:6e:43:86", "network": {"id": "d0177494-e3d0-4799-ab18-6499f0c41488", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-812838737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b70942ca8c3d422a8d7740aad1324a6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64b89813-24", "ovs_interfaceid": "64b89813-24eb-4145-85dd-24053b31b10c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2265.841218] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Releasing lock "refresh_cache-15b28973-c067-45be-ad64-b9315f033824" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2265.841493] env[62507]: DEBUG nova.compute.manager [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Instance network_info: |[{"id": "64b89813-24eb-4145-85dd-24053b31b10c", "address": "fa:16:3e:6e:43:86", "network": {"id": "d0177494-e3d0-4799-ab18-6499f0c41488", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-812838737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b70942ca8c3d422a8d7740aad1324a6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64b89813-24", "ovs_interfaceid": "64b89813-24eb-4145-85dd-24053b31b10c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2265.841897] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:43:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64b89813-24eb-4145-85dd-24053b31b10c', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2265.849357] env[62507]: DEBUG oslo.service.loopingcall [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2265.849818] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15b28973-c067-45be-ad64-b9315f033824] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2265.850062] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7f652aa-b133-4612-a096-8c04d63fa1f5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.870102] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2265.870102] env[62507]: value = "task-2460139" [ 2265.870102] env[62507]: _type = "Task" [ 2265.870102] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.877609] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460139, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.380749] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460139, 'name': CreateVM_Task, 'duration_secs': 0.287593} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2266.381053] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15b28973-c067-45be-ad64-b9315f033824] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2266.381590] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2266.381756] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2266.382094] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2266.382340] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19f2346d-d8a6-49ae-942c-c74f7ed46536 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.386550] env[62507]: DEBUG oslo_vmware.api [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for the task: (returnval){ [ 2266.386550] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52cf0f54-ea3a-2966-a217-4981a1730a0a" [ 2266.386550] env[62507]: _type = "Task" [ 2266.386550] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2266.393837] env[62507]: DEBUG oslo_vmware.api [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52cf0f54-ea3a-2966-a217-4981a1730a0a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.897156] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2266.897418] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 15b28973-c067-45be-ad64-b9315f033824] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2266.897659] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2267.167602] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2267.179546] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2267.179766] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2267.179934] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.180103] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2267.181189] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e3fad6-33eb-4f4e-914e-3d66c18a3de6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.189886] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f711a999-d53c-4d86-9525-0d911fc78cf6 {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.203154] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba0b499-7835-4e9c-9284-a8b52f517321 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.209017] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ee9708-3dce-4749-8dee-3dff1c790b26 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.238162] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181170MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2267.238321] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2267.238518] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2267.307482] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 39471434-14af-468a-8b55-5fc58957e7b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.307680] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 16295fdd-45d6-492f-99d9-1006ec42c097 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.307680] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a76d0987-29c2-423b-972c-990639986d5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.307847] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 56c176ec-c6e5-4f48-a5be-badef25c5667 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.307889] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 425b5171-97c2-4700-ad5f-c79aadb39eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.308067] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 479a1e78-23c0-4a96-aa72-aa419c8c251b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.308207] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7adef000-4700-4c2f-a7ea-09baf40cedf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.308332] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 33af3273-6d4b-435d-8c40-cdbac591a84f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.308461] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d27a54c7-96d0-467c-8cb2-8b23de43c107 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.308613] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 15b28973-c067-45be-ad64-b9315f033824 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.320476] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b609718a-b93b-4398-9ffd-1a4dca9f0753 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2267.320726] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2267.320882] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2267.352040] env[62507]: WARNING oslo_vmware.rw_handles [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2267.352040] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2267.352040] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2267.352040] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2267.352040] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2267.352040] env[62507]: ERROR oslo_vmware.rw_handles response.begin()
[ 2267.352040] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2267.352040] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2267.352040] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2267.352040] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2267.352040] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2267.352040] env[62507]: ERROR oslo_vmware.rw_handles
[ 2267.352399] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/76658e3a-5128-454a-a1ee-da57a1b55979/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2267.355070] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2267.355334] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Copying Virtual Disk [datastore2] vmware_temp/76658e3a-5128-454a-a1ee-da57a1b55979/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2]
vmware_temp/76658e3a-5128-454a-a1ee-da57a1b55979/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2267.355866] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1cb1961b-4b5a-4916-a35f-3293d37aa988 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.365737] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for the task: (returnval){ [ 2267.365737] env[62507]: value = "task-2460140" [ 2267.365737] env[62507]: _type = "Task" [ 2267.365737] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.374366] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Task: {'id': task-2460140, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.449421] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3596518-5099-47e8-82a0-1eecae5fb3f6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.456161] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea53e579-e726-4310-aa4d-c3627530abf3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.484871] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb6aae7-cac0-47c7-b4e3-49e19558e629 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.491317] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1ee267-8a37-485a-abfb-8c96bbf6b9ab {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.504113] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2267.512254] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2267.525604] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2267.525604] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.287s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.555864] env[62507]: DEBUG nova.compute.manager [req-3b766b67-cac7-41dc-accd-7f5e26a666ce req-886bc164-3b11-4926-8d67-a365f3240728 service nova] [instance: 15b28973-c067-45be-ad64-b9315f033824] Received event network-changed-64b89813-24eb-4145-85dd-24053b31b10c {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2267.556077] env[62507]: DEBUG nova.compute.manager [req-3b766b67-cac7-41dc-accd-7f5e26a666ce req-886bc164-3b11-4926-8d67-a365f3240728 service nova] [instance: 15b28973-c067-45be-ad64-b9315f033824] Refreshing instance network info cache due to event network-changed-64b89813-24eb-4145-85dd-24053b31b10c. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2267.556281] env[62507]: DEBUG oslo_concurrency.lockutils [req-3b766b67-cac7-41dc-accd-7f5e26a666ce req-886bc164-3b11-4926-8d67-a365f3240728 service nova] Acquiring lock "refresh_cache-15b28973-c067-45be-ad64-b9315f033824" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2267.556412] env[62507]: DEBUG oslo_concurrency.lockutils [req-3b766b67-cac7-41dc-accd-7f5e26a666ce req-886bc164-3b11-4926-8d67-a365f3240728 service nova] Acquired lock "refresh_cache-15b28973-c067-45be-ad64-b9315f033824" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2267.558632] env[62507]: DEBUG nova.network.neutron [req-3b766b67-cac7-41dc-accd-7f5e26a666ce req-886bc164-3b11-4926-8d67-a365f3240728 service nova] [instance: 15b28973-c067-45be-ad64-b9315f033824] Refreshing network info cache for port 64b89813-24eb-4145-85dd-24053b31b10c {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2267.796759] env[62507]: DEBUG nova.network.neutron [req-3b766b67-cac7-41dc-accd-7f5e26a666ce req-886bc164-3b11-4926-8d67-a365f3240728 service nova] [instance: 15b28973-c067-45be-ad64-b9315f033824] Updated VIF entry in instance network info cache for port 64b89813-24eb-4145-85dd-24053b31b10c. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2267.797133] env[62507]: DEBUG nova.network.neutron [req-3b766b67-cac7-41dc-accd-7f5e26a666ce req-886bc164-3b11-4926-8d67-a365f3240728 service nova] [instance: 15b28973-c067-45be-ad64-b9315f033824] Updating instance_info_cache with network_info: [{"id": "64b89813-24eb-4145-85dd-24053b31b10c", "address": "fa:16:3e:6e:43:86", "network": {"id": "d0177494-e3d0-4799-ab18-6499f0c41488", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-812838737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b70942ca8c3d422a8d7740aad1324a6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64b89813-24", "ovs_interfaceid": "64b89813-24eb-4145-85dd-24053b31b10c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2267.806153] env[62507]: DEBUG oslo_concurrency.lockutils [req-3b766b67-cac7-41dc-accd-7f5e26a666ce req-886bc164-3b11-4926-8d67-a365f3240728 service nova] Releasing lock "refresh_cache-15b28973-c067-45be-ad64-b9315f033824" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2267.875387] env[62507]: DEBUG oslo_vmware.exceptions [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2267.875600] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2267.876147] env[62507]: ERROR nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2267.876147] env[62507]: Faults: ['InvalidArgument'] [ 2267.876147] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Traceback (most recent call last): [ 2267.876147] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2267.876147] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] yield resources [ 2267.876147] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2267.876147] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] self.driver.spawn(context, instance, image_meta, [ 2267.876147] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2267.876147] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2267.876147] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2267.876147] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] self._fetch_image_if_missing(context, vi) [ 2267.876147] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2267.876499] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] image_cache(vi, tmp_image_ds_loc) [ 2267.876499] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2267.876499] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] vm_util.copy_virtual_disk( [ 2267.876499] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2267.876499] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] session._wait_for_task(vmdk_copy_task) [ 2267.876499] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2267.876499] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] return self.wait_for_task(task_ref) [ 2267.876499] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2267.876499] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] return evt.wait() [ 2267.876499] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2267.876499] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] result = hub.switch() [ 2267.876499] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2267.876499] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] return self.greenlet.switch() [ 2267.876909] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2267.876909] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] self.f(*self.args, **self.kw) [ 2267.876909] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2267.876909] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] raise exceptions.translate_fault(task_info.error) [ 2267.876909] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2267.876909] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Faults: ['InvalidArgument'] [ 2267.876909] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] [ 2267.876909] env[62507]: INFO nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Terminating instance [ 2267.877957] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2267.878186] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2267.878420] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef35d9e8-9c6b-449e-825d-a46771693b3e 
{{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.880650] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2267.880847] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2267.881552] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c817db29-7569-4286-8bb2-5b24dc84c9f9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.888143] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2267.889048] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d83fba41-3425-41c9-951b-71f401edfb53 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.890351] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2267.890526] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2267.891178] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f38805ec-d420-403a-a239-7a7a3ccad43e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.895743] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for the task: (returnval){ [ 2267.895743] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e4a4d3-823b-43b4-1c1c-aa50db1b0986" [ 2267.895743] env[62507]: _type = "Task" [ 2267.895743] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.902657] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e4a4d3-823b-43b4-1c1c-aa50db1b0986, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.965673] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2267.965888] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2267.966090] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Deleting the datastore file [datastore2] 39471434-14af-468a-8b55-5fc58957e7b6 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2267.966356] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29575449-b433-403c-b6dc-4582574bb9f7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.972479] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for the task: (returnval){ [ 2267.972479] env[62507]: value = "task-2460142" [ 2267.972479] env[62507]: _type = "Task" [ 2267.972479] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.979942] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Task: {'id': task-2460142, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.406562] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2268.406885] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Creating directory with path [datastore2] vmware_temp/e0449847-4b17-4267-a650-4f4210f66336/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2268.407141] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68e8259d-def5-445e-937c-1014674504e9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.419057] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Created directory with path [datastore2] vmware_temp/e0449847-4b17-4267-a650-4f4210f66336/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2268.419057] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Fetch image to [datastore2] vmware_temp/e0449847-4b17-4267-a650-4f4210f66336/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2268.419279] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/e0449847-4b17-4267-a650-4f4210f66336/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2268.419980] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a910ab-3f04-403c-ab62-c3080aecfd5c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.426770] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ae2abd-c0c0-4210-8f51-b75815c6bd3f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.435837] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23704729-0964-41aa-b2a3-e8477b5f7a90 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.467433] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-52251cee-7b82-43b3-96e0-43cb4b8b7077 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.472546] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-338a994c-301e-4206-97ba-88d13cf2c8d6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.481287] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Task: {'id': task-2460142, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064377} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2268.481509] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2268.481688] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2268.481858] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2268.482043] env[62507]: INFO nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2268.484069] env[62507]: DEBUG nova.compute.claims [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2268.484244] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2268.484456] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2268.497408] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2268.525156] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2268.549286] env[62507]: DEBUG oslo_vmware.rw_handles [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e0449847-4b17-4267-a650-4f4210f66336/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2268.613507] env[62507]: DEBUG oslo_vmware.rw_handles [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2268.613702] env[62507]: DEBUG oslo_vmware.rw_handles [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e0449847-4b17-4267-a650-4f4210f66336/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2268.724015] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b852140a-7b66-433e-87ba-240b7f440f96 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.729611] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc8afed-88c4-4fce-bf70-3e64aedaaecb {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.759333] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf0edff-9f64-486d-844d-aa476569add1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.766085] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ba5d42-fdb1-4caf-8ba1-4ddf234b5201 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.778850] env[62507]: DEBUG nova.compute.provider_tree [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2268.788363] env[62507]: DEBUG nova.scheduler.client.report [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2268.801538] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.317s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.802243] env[62507]: ERROR nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2268.802243] env[62507]: Faults: ['InvalidArgument'] [ 2268.802243] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Traceback (most recent call last): [ 2268.802243] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2268.802243] env[62507]: 
ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] self.driver.spawn(context, instance, image_meta, [ 2268.802243] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2268.802243] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2268.802243] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2268.802243] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] self._fetch_image_if_missing(context, vi) [ 2268.802243] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2268.802243] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] image_cache(vi, tmp_image_ds_loc) [ 2268.802243] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2268.802644] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] vm_util.copy_virtual_disk( [ 2268.802644] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2268.802644] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] session._wait_for_task(vmdk_copy_task) [ 2268.802644] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2268.802644] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] return self.wait_for_task(task_ref) [ 2268.802644] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2268.802644] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] return evt.wait() [ 2268.802644] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2268.802644] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] result = hub.switch() [ 2268.802644] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2268.802644] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] return self.greenlet.switch() [ 2268.802644] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2268.802644] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] self.f(*self.args, **self.kw) [ 2268.802964] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2268.802964] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] raise exceptions.translate_fault(task_info.error) [ 2268.802964] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2268.802964] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Faults: ['InvalidArgument'] [ 2268.802964] env[62507]: ERROR nova.compute.manager [instance: 39471434-14af-468a-8b55-5fc58957e7b6] [ 2268.803521] env[62507]: DEBUG nova.compute.utils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2268.804922] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Build of instance 39471434-14af-468a-8b55-5fc58957e7b6 was re-scheduled: A specified parameter was not correct: fileType [ 2268.804922] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2268.805343] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2268.805519] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2268.805694] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2268.805860] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2269.091053] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2269.104031] env[62507]: INFO nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Took 0.30 seconds to deallocate network for instance. [ 2269.193980] env[62507]: INFO nova.scheduler.client.report [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Deleted allocations for instance 39471434-14af-468a-8b55-5fc58957e7b6 [ 2269.219785] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "39471434-14af-468a-8b55-5fc58957e7b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 594.116s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2269.220929] env[62507]: DEBUG oslo_concurrency.lockutils [None req-da37e667-26f9-41bd-a6f5-4f8a8188e38e tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "39471434-14af-468a-8b55-5fc58957e7b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 398.233s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2269.221175] env[62507]: DEBUG oslo_concurrency.lockutils [None req-da37e667-26f9-41bd-a6f5-4f8a8188e38e tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "39471434-14af-468a-8b55-5fc58957e7b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2269.221383] env[62507]: DEBUG oslo_concurrency.lockutils [None req-da37e667-26f9-41bd-a6f5-4f8a8188e38e tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "39471434-14af-468a-8b55-5fc58957e7b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" ::
waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2269.221552] env[62507]: DEBUG oslo_concurrency.lockutils [None req-da37e667-26f9-41bd-a6f5-4f8a8188e38e tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "39471434-14af-468a-8b55-5fc58957e7b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2269.223509] env[62507]: INFO nova.compute.manager [None req-da37e667-26f9-41bd-a6f5-4f8a8188e38e tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Terminating instance [ 2269.225167] env[62507]: DEBUG nova.compute.manager [None req-da37e667-26f9-41bd-a6f5-4f8a8188e38e tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2269.225433] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-da37e667-26f9-41bd-a6f5-4f8a8188e38e tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2269.225911] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ffc03f42-3171-43af-9132-305f9726c5df {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.235900] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a479bc-f596-44eb-80e1-0ac65fbaf131 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.248078] env[62507]: DEBUG nova.compute.manager [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Starting instance... {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2269.269121] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-da37e667-26f9-41bd-a6f5-4f8a8188e38e tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 39471434-14af-468a-8b55-5fc58957e7b6 could not be found. [ 2269.269334] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-da37e667-26f9-41bd-a6f5-4f8a8188e38e tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2269.269516] env[62507]: INFO nova.compute.manager [None req-da37e667-26f9-41bd-a6f5-4f8a8188e38e tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 2269.270866] env[62507]: DEBUG oslo.service.loopingcall [None req-da37e667-26f9-41bd-a6f5-4f8a8188e38e tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2269.271272] env[62507]: DEBUG nova.compute.manager [-] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2269.271392] env[62507]: DEBUG nova.network.neutron [-] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2269.293962] env[62507]: DEBUG nova.network.neutron [-] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2269.296778] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2269.297041] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2269.298589] env[62507]: INFO nova.compute.claims [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2269.301706] env[62507]: INFO nova.compute.manager [-] [instance: 39471434-14af-468a-8b55-5fc58957e7b6] Took 0.03 seconds to deallocate network for instance.
[ 2269.385035] env[62507]: DEBUG oslo_concurrency.lockutils [None req-da37e667-26f9-41bd-a6f5-4f8a8188e38e tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "39471434-14af-468a-8b55-5fc58957e7b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.164s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2269.472547] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34959dae-0362-47fe-b554-e34e10e632ee {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.480157] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cefc6c6-3f21-4c8d-81e4-af358243af32 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.511129] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26d3944-3471-49ea-ae49-a074f9728c23 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.518059] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c4b0db-8e23-4a66-a4b8-3af5764d0eb7 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.530806] env[62507]: DEBUG nova.compute.provider_tree [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2269.539479] env[62507]: DEBUG nova.scheduler.client.report [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2269.553263] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.256s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2269.553712] env[62507]: DEBUG nova.compute.manager [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Start building networks asynchronously for instance.
{{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2269.585605] env[62507]: DEBUG nova.compute.utils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2269.587112] env[62507]: DEBUG nova.compute.manager [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2269.587291] env[62507]: DEBUG nova.network.neutron [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2269.594882] env[62507]: DEBUG nova.compute.manager [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2269.648743] env[62507]: DEBUG nova.policy [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec30c348fdca4b22a99ab020dfb776f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b70942ca8c3d422a8d7740aad1324a6b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 2269.655622] env[62507]: DEBUG nova.compute.manager [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2269.679092] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=<?>,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-02-12T01:14:38Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2269.679346] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2269.679505] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2269.679688] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2269.679842] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2269.679992] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2269.680228] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2269.680392] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2269.680563] env[62507]: DEBUG
nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2269.680728] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2269.680905] env[62507]: DEBUG nova.virt.hardware [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2269.681769] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d188b170-5d1d-4609-8b3d-55eb8dc72fbc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.689235] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f514a49-68ca-4314-9c7c-85803cfd497c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.155557] env[62507]: DEBUG nova.network.neutron [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Successfully created port: c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2270.770794] env[62507]: DEBUG nova.network.neutron [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Successfully updated port: c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2270.780777] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "refresh_cache-b609718a-b93b-4398-9ffd-1a4dca9f0753" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2270.780924] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired lock "refresh_cache-b609718a-b93b-4398-9ffd-1a4dca9f0753" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2270.781085] env[62507]: DEBUG nova.network.neutron [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2270.826247] env[62507]: DEBUG nova.network.neutron [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb 
tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Instance cache missing network info. {{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2270.999554] env[62507]: DEBUG nova.network.neutron [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Updating instance_info_cache with network_info: [{"id": "c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa", "address": "fa:16:3e:be:cf:56", "network": {"id": "d0177494-e3d0-4799-ab18-6499f0c41488", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-812838737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b70942ca8c3d422a8d7740aad1324a6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2e0fca9-5c", "ovs_interfaceid": "c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2271.010379] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Releasing lock "refresh_cache-b609718a-b93b-4398-9ffd-1a4dca9f0753" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2271.010674] env[62507]: DEBUG nova.compute.manager [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Instance network_info: |[{"id": "c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa", "address": "fa:16:3e:be:cf:56", "network": {"id": "d0177494-e3d0-4799-ab18-6499f0c41488", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-812838737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b70942ca8c3d422a8d7740aad1324a6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2e0fca9-5c", "ovs_interfaceid": "c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2271.011097] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:cf:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2271.018647] env[62507]: DEBUG oslo.service.loopingcall [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2271.019104] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2271.019345] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3384f094-6028-473a-8e42-5107ad3654b8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.039110] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2271.039110] env[62507]: value = "task-2460143" [ 2271.039110] env[62507]: _type = "Task" [ 2271.039110] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2271.046608] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460143, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.552559] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460143, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.631615] env[62507]: DEBUG nova.compute.manager [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Received event network-vif-plugged-c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2271.631839] env[62507]: DEBUG oslo_concurrency.lockutils [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] Acquiring lock "b609718a-b93b-4398-9ffd-1a4dca9f0753-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2271.632066] env[62507]: DEBUG oslo_concurrency.lockutils [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] Lock "b609718a-b93b-4398-9ffd-1a4dca9f0753-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2271.632244] env[62507]: DEBUG oslo_concurrency.lockutils [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] Lock "b609718a-b93b-4398-9ffd-1a4dca9f0753-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2271.632413] env[62507]: DEBUG nova.compute.manager [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] No waiting events found dispatching network-vif-plugged-c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2271.632580] env[62507]: WARNING nova.compute.manager [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Received unexpected event network-vif-plugged-c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa for instance with vm_state building and task_state spawning. [ 2271.632745] env[62507]: DEBUG nova.compute.manager [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Received event network-changed-c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2271.632905] env[62507]: DEBUG nova.compute.manager [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Refreshing instance network info cache due to event network-changed-c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa. 
{{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2271.633160] env[62507]: DEBUG oslo_concurrency.lockutils [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] Acquiring lock "refresh_cache-b609718a-b93b-4398-9ffd-1a4dca9f0753" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2271.633331] env[62507]: DEBUG oslo_concurrency.lockutils [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] Acquired lock "refresh_cache-b609718a-b93b-4398-9ffd-1a4dca9f0753" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2271.633499] env[62507]: DEBUG nova.network.neutron [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Refreshing network info cache for port c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2271.921287] env[62507]: DEBUG nova.network.neutron [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Updated VIF entry in instance network info cache for port c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa. {{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2271.921644] env[62507]: DEBUG nova.network.neutron [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Updating instance_info_cache with network_info: [{"id": "c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa", "address": "fa:16:3e:be:cf:56", "network": {"id": "d0177494-e3d0-4799-ab18-6499f0c41488", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-812838737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b70942ca8c3d422a8d7740aad1324a6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2e0fca9-5c", "ovs_interfaceid": "c2e0fca9-5c08-4e9a-8d43-5115cc26f7aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2271.930846] env[62507]: DEBUG oslo_concurrency.lockutils [req-8cd910c0-6ed9-4d72-b015-8d5ef3f221eb req-cf1fd050-6bf2-4279-b98a-b1b476c8f814 service nova] Releasing lock "refresh_cache-b609718a-b93b-4398-9ffd-1a4dca9f0753" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2272.049097] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460143, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2272.549820] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460143, 'name': CreateVM_Task, 'duration_secs': 1.227783} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2272.549985] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2272.550691] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2272.550863] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2272.551211] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2272.551460] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4f3ab90-4bcd-4a98-b580-3b87adc8a93d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.555806] env[62507]: DEBUG oslo_vmware.api [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for the task: (returnval){ [ 2272.555806] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52a82881-7676-c078-195e-c410381331e9" [ 2272.555806] env[62507]: _type = "Task" [ 2272.555806] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2272.563226] env[62507]: DEBUG oslo_vmware.api [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52a82881-7676-c078-195e-c410381331e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.066307] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2273.066709] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2273.066803] env[62507]: DEBUG oslo_concurrency.lockutils [None req-6c49cd13-f4af-41f0-8267-4e60cdb528fb tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2311.331836] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2311.332375] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Getting list of instances from cluster (obj){ [ 2311.332375] env[62507]: value = "domain-c8" [ 2311.332375] env[62507]: _type = "ClusterComputeResource" [ 2311.332375] env[62507]: } {{(pid=62507) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2311.333421] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71cd0a4f-14b6-4c7e-a3a6-bb7bee79e3e6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.349609] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Got total of 10 instances {{(pid=62507) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2318.255283] env[62507]: WARNING oslo_vmware.rw_handles [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2318.255283] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2318.255283] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2318.255283] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2318.255283] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2318.255283] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 2318.255283] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2318.255283] env[62507]: ERROR 
oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2318.255283] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2318.255283] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2318.255283] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2318.255283] env[62507]: ERROR oslo_vmware.rw_handles [ 2318.255831] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/e0449847-4b17-4267-a650-4f4210f66336/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2318.258460] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2318.258759] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Copying Virtual Disk [datastore2] vmware_temp/e0449847-4b17-4267-a650-4f4210f66336/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/e0449847-4b17-4267-a650-4f4210f66336/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2318.259107] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7c72148-d905-44d7-b9be-0833c0977720 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.267818] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for the task: (returnval){ [ 2318.267818] env[62507]: value = "task-2460144" [ 2318.267818] env[62507]: _type = "Task" [ 2318.267818] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2318.276450] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Task: {'id': task-2460144, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2318.778497] env[62507]: DEBUG oslo_vmware.exceptions [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2318.778769] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2318.779343] env[62507]: ERROR nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2318.779343] env[62507]: Faults: ['InvalidArgument'] [ 2318.779343] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Traceback (most recent call last): [ 2318.779343] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2318.779343] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] yield resources [ 2318.779343] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2318.779343] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] self.driver.spawn(context, instance, image_meta, [ 2318.779343] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2318.779343] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2318.779343] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2318.779343] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] self._fetch_image_if_missing(context, vi) [ 2318.779343] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2318.779753] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] image_cache(vi, tmp_image_ds_loc) [ 2318.779753] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2318.779753] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] vm_util.copy_virtual_disk( [ 2318.779753] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2318.779753] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] session._wait_for_task(vmdk_copy_task) [ 2318.779753] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2318.779753] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] return self.wait_for_task(task_ref) [ 2318.779753] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2318.779753] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] return evt.wait() [ 2318.779753] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2318.779753] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] result = hub.switch() [ 2318.779753] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2318.779753] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] return self.greenlet.switch() [ 2318.780228] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2318.780228] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] self.f(*self.args, **self.kw) [ 2318.780228] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2318.780228] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] raise exceptions.translate_fault(task_info.error) [ 2318.780228] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2318.780228] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Faults: ['InvalidArgument'] [ 2318.780228] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] [ 2318.780228] env[62507]: INFO nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Terminating instance [ 2318.781226] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2318.781436] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2318.781686] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-334a5b9b-c17d-4fe5-ac22-72d85061e60a 
{{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.783747] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2318.783938] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2318.784642] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f96351-dd37-445e-8101-28b295944510 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.791226] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2318.791394] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-861685ae-0d72-46d1-b677-27e8c0ca4d15 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.793432] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2318.793608] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2318.794522] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfc330f2-aafd-4294-a5db-ad7e379d1d88 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.799009] env[62507]: DEBUG oslo_vmware.api [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for the task: (returnval){ [ 2318.799009] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]525f4c32-d732-e921-d8b5-11db25814b83" [ 2318.799009] env[62507]: _type = "Task" [ 2318.799009] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2318.805850] env[62507]: DEBUG oslo_vmware.api [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]525f4c32-d732-e921-d8b5-11db25814b83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2318.865189] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2318.865410] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2318.865589] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Deleting the datastore file [datastore2] 16295fdd-45d6-492f-99d9-1006ec42c097 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2318.865836] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-634183a5-27e4-474a-b03c-81777fdc1e58 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.872008] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for the task: (returnval){ [ 2318.872008] env[62507]: value = "task-2460146" [ 2318.872008] env[62507]: _type = "Task" [ 2318.872008] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2318.879407] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Task: {'id': task-2460146, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2319.309423] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2319.309688] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Creating directory with path [datastore2] vmware_temp/efe0848e-c0e2-4100-8e7d-497b5e42648d/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2319.309932] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-754375db-da65-4669-bbf6-ad9c22faf94b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.321548] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Created directory with path [datastore2] vmware_temp/efe0848e-c0e2-4100-8e7d-497b5e42648d/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2319.321734] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Fetch image to [datastore2] vmware_temp/efe0848e-c0e2-4100-8e7d-497b5e42648d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2319.321904] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/efe0848e-c0e2-4100-8e7d-497b5e42648d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2319.322623] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3543f2-0ff5-4780-8ec6-c08c38521f74 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.328835] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68c19e9-60b0-4ba2-9085-865f1c377a88 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.337643] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e26dfc-0b73-43d9-9ccd-89a4cea0a699 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.368901] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-eb50513f-5ad8-420c-b999-f60577102256 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.377350] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-679a3518-5271-4d87-b23a-ce88d8b11204 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.383616] env[62507]: DEBUG oslo_vmware.api [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Task: {'id': task-2460146, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064906} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2319.383864] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2319.384062] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2319.384240] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2319.384416] env[62507]: INFO nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2319.386535] env[62507]: DEBUG nova.compute.claims [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2319.386715] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2319.386928] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2319.398653] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2319.524069] env[62507]: DEBUG oslo_vmware.rw_handles [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/efe0848e-c0e2-4100-8e7d-497b5e42648d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2319.584575] env[62507]: DEBUG oslo_vmware.rw_handles [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2319.584766] env[62507]: DEBUG oslo_vmware.rw_handles [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/efe0848e-c0e2-4100-8e7d-497b5e42648d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2319.641645] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8918c8-3dac-4e06-94fb-e1ffa7f3f0bc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.648949] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcf176c-defd-4ee2-b392-d2af32119302 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.678903] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d048c6b-a88c-4a11-89b4-71ecb3a4f624 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.687062] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d6b461-6462-4de4-9367-76f1edeb6ed0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.700882] env[62507]: DEBUG nova.compute.provider_tree [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2319.709080] env[62507]: DEBUG nova.scheduler.client.report [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2319.723116] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.336s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2319.723690] env[62507]: ERROR nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2319.723690] env[62507]: Faults: ['InvalidArgument'] [ 2319.723690] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Traceback (most recent call last): [ 2319.723690] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2319.723690] env[62507]: 
ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] self.driver.spawn(context, instance, image_meta, [ 2319.723690] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2319.723690] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2319.723690] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2319.723690] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] self._fetch_image_if_missing(context, vi) [ 2319.723690] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2319.723690] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] image_cache(vi, tmp_image_ds_loc) [ 2319.723690] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2319.724064] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] vm_util.copy_virtual_disk( [ 2319.724064] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2319.724064] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] session._wait_for_task(vmdk_copy_task) [ 2319.724064] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2319.724064] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] return self.wait_for_task(task_ref) [ 2319.724064] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2319.724064] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] return evt.wait() [ 2319.724064] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2319.724064] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] result = hub.switch() [ 2319.724064] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2319.724064] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] return self.greenlet.switch() [ 2319.724064] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2319.724064] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] self.f(*self.args, **self.kw) [ 2319.724368] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2319.724368] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] raise exceptions.translate_fault(task_info.error) [ 2319.724368] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2319.724368] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Faults: ['InvalidArgument'] [ 2319.724368] env[62507]: ERROR nova.compute.manager [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] [ 2319.724368] env[62507]: DEBUG nova.compute.utils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2319.725707] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Build of instance 16295fdd-45d6-492f-99d9-1006ec42c097 was re-scheduled: A specified parameter was not correct: fileType [ 2319.725707] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2319.726210] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2319.726284] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2319.726461] env[62507]: DEBUG nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2319.726626] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2320.176401] env[62507]: DEBUG nova.network.neutron [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2320.187092] env[62507]: INFO nova.compute.manager [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Took 0.46 seconds to deallocate network for instance. [ 2320.277541] env[62507]: INFO nova.scheduler.client.report [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Deleted allocations for instance 16295fdd-45d6-492f-99d9-1006ec42c097 [ 2320.298178] env[62507]: DEBUG oslo_concurrency.lockutils [None req-147ec73d-5c09-492b-9740-246d69d2d6b0 tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "16295fdd-45d6-492f-99d9-1006ec42c097" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 645.171s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2320.298466] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8908eeb2-c664-4d0d-b5f9-685c82890bab tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "16295fdd-45d6-492f-99d9-1006ec42c097" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 449.437s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2320.298714] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8908eeb2-c664-4d0d-b5f9-685c82890bab tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Acquiring lock "16295fdd-45d6-492f-99d9-1006ec42c097-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2320.298930] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8908eeb2-c664-4d0d-b5f9-685c82890bab tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "16295fdd-45d6-492f-99d9-1006ec42c097-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2320.299127] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8908eeb2-c664-4d0d-b5f9-685c82890bab tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "16295fdd-45d6-492f-99d9-1006ec42c097-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2320.301413] env[62507]: INFO nova.compute.manager [None req-8908eeb2-c664-4d0d-b5f9-685c82890bab tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Terminating instance [ 2320.303289] env[62507]: DEBUG nova.compute.manager [None req-8908eeb2-c664-4d0d-b5f9-685c82890bab tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2320.303654] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8908eeb2-c664-4d0d-b5f9-685c82890bab tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2320.303968] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f2cff681-ba31-4eb1-bea2-13d8fb222982 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.314247] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed60f07-6891-4222-b6db-8d7297131365 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.342511] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-8908eeb2-c664-4d0d-b5f9-685c82890bab tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 16295fdd-45d6-492f-99d9-1006ec42c097 could not be found. [ 2320.342733] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8908eeb2-c664-4d0d-b5f9-685c82890bab tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2320.342913] env[62507]: INFO nova.compute.manager [None req-8908eeb2-c664-4d0d-b5f9-685c82890bab tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2320.343209] env[62507]: DEBUG oslo.service.loopingcall [None req-8908eeb2-c664-4d0d-b5f9-685c82890bab tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2320.343482] env[62507]: DEBUG nova.compute.manager [-] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2320.343595] env[62507]: DEBUG nova.network.neutron [-] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2320.367981] env[62507]: DEBUG nova.network.neutron [-] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2320.375757] env[62507]: INFO nova.compute.manager [-] [instance: 16295fdd-45d6-492f-99d9-1006ec42c097] Took 0.03 seconds to deallocate network for instance. [ 2320.456837] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8908eeb2-c664-4d0d-b5f9-685c82890bab tempest-MultipleCreateTestJSON-1680863629 tempest-MultipleCreateTestJSON-1680863629-project-member] Lock "16295fdd-45d6-492f-99d9-1006ec42c097" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.158s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2322.203437] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2322.203705] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2322.203788] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2322.222917] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a76d0987-29c2-423b-972c-990639986d5f] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2322.223079] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2322.223209] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2322.223337] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Skipping network cache update for instance because it is Building.
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2322.223462] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2322.223588] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2322.223711] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2322.223831] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 15b28973-c067-45be-ad64-b9315f033824] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2322.223952] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2322.224086] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2322.224578] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.167445] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.167839] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.167952] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.168084] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2327.168949] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2327.169283] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2329.162860] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2329.183214] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2329.192977] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2329.193214] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2329.193383] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2329.193541] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2329.195823] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041c69a3-cbba-48a7-93d1-1f88f73523f3 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.204050] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa73e02f-7018-43b2-9d08-e01c31b6ca68 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.217950] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb05b79-a631-4179-8d81-8885a11df21f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.223902] env[62507]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5049df6-d839-48fd-9c06-fb05b82d3610 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.252199] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181165MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2329.252349] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2329.252549] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2329.317054] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance a76d0987-29c2-423b-972c-990639986d5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2329.317054] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 56c176ec-c6e5-4f48-a5be-badef25c5667 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2329.317054] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 425b5171-97c2-4700-ad5f-c79aadb39eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2329.317239] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 479a1e78-23c0-4a96-aa72-aa419c8c251b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2329.317273] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7adef000-4700-4c2f-a7ea-09baf40cedf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2329.317392] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 33af3273-6d4b-435d-8c40-cdbac591a84f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2329.317510] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d27a54c7-96d0-467c-8cb2-8b23de43c107 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2329.317689] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 15b28973-c067-45be-ad64-b9315f033824 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2329.318033] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b609718a-b93b-4398-9ffd-1a4dca9f0753 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2329.318033] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2329.318151] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2329.333489] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing inventories for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2329.346355] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Updating ProviderTree inventory for provider 40e67440-0925-46e5-9b58-6e63187cdfab from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2329.346539] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Updating inventory in ProviderTree for provider 
40e67440-0925-46e5-9b58-6e63187cdfab with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2329.358194] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing aggregate associations for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab, aggregates: None {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2329.375223] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Refreshing trait associations for resource provider 40e67440-0925-46e5-9b58-6e63187cdfab, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=62507) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2329.473274] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f1bd9b-8c13-4770-a564-b65ee35dd3c1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.481788] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4dad072-62a0-48eb-b7d0-f1b2db981c5e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.512914] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ae33e9-0177-44f7-a2f6-82247572a9a2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.520095] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d38c05d-fb94-4aca-a2c0-93943326fa57 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.532530] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2329.540582] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2329.554801] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2329.555049] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.302s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2330.539600] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2333.168499] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2336.177353] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2336.177727] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Cleaning up deleted instances with incomplete migration {{(pid=62507) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2340.763116] env[62507]: DEBUG oslo_concurrency.lockutils [None req-875369f8-f018-4e2a-b893-1afd1f6ff491 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "33af3273-6d4b-435d-8c40-cdbac591a84f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2343.177465] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2343.177825] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Cleaning up deleted instances {{(pid=62507) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2343.187824] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] There are 0 instances to clean {{(pid=62507) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2355.322784] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.347660] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Getting list of instances from cluster (obj){ [ 2355.347660] env[62507]: value = "domain-c8" [ 2355.347660] env[62507]: _type = "ClusterComputeResource" [ 2355.347660] env[62507]: } {{(pid=62507) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2355.348973] env[62507]: DEBUG oslo_vmware.service [-] Invoking
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb160437-f240-42d7-a93c-d42254200574 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.370682] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Got total of 9 instances {{(pid=62507) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2355.370916] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid a76d0987-29c2-423b-972c-990639986d5f {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2355.371227] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 56c176ec-c6e5-4f48-a5be-badef25c5667 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2355.371463] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 425b5171-97c2-4700-ad5f-c79aadb39eae {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2355.371676] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 479a1e78-23c0-4a96-aa72-aa419c8c251b {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2355.371887] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 7adef000-4700-4c2f-a7ea-09baf40cedf5 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2355.372116] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 33af3273-6d4b-435d-8c40-cdbac591a84f {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2355.372332] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid d27a54c7-96d0-467c-8cb2-8b23de43c107 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2355.372540] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid 15b28973-c067-45be-ad64-b9315f033824 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2355.372747] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Triggering sync for uuid b609718a-b93b-4398-9ffd-1a4dca9f0753 {{(pid=62507) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2355.373204] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "a76d0987-29c2-423b-972c-990639986d5f" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.373541] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "56c176ec-c6e5-4f48-a5be-badef25c5667" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.373832] env[62507]: DEBUG oslo_concurrency.lockutils [None
req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "425b5171-97c2-4700-ad5f-c79aadb39eae" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.374138] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "479a1e78-23c0-4a96-aa72-aa419c8c251b" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.374472] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "7adef000-4700-4c2f-a7ea-09baf40cedf5" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.374697] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "33af3273-6d4b-435d-8c40-cdbac591a84f" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.374970] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "d27a54c7-96d0-467c-8cb2-8b23de43c107" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.375259] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "15b28973-c067-45be-ad64-b9315f033824" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.375534] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "b609718a-b93b-4398-9ffd-1a4dca9f0753" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2367.391714] env[62507]: WARNING oslo_vmware.rw_handles [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2367.391714] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2367.391714] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2367.391714] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2367.391714] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2367.391714] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 2367.391714] env[62507]: ERROR oslo_vmware.rw_handles File
"/usr/lib/python3.10/http/client.py", line 318, in begin [ 2367.391714] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2367.391714] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2367.391714] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2367.391714] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2367.391714] env[62507]: ERROR oslo_vmware.rw_handles [ 2367.392585] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/efe0848e-c0e2-4100-8e7d-497b5e42648d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2367.394584] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2367.394914] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Copying Virtual Disk [datastore2] vmware_temp/efe0848e-c0e2-4100-8e7d-497b5e42648d/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/efe0848e-c0e2-4100-8e7d-497b5e42648d/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2367.395323] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1d98568-f6ce-4f85-a9ec-36b190731f7c {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.403509] env[62507]: DEBUG oslo_vmware.api [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for the task: (returnval){ [ 2367.403509] env[62507]: value = "task-2460147" [ 2367.403509] env[62507]: _type = "Task" [ 2367.403509] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2367.411634] env[62507]: DEBUG oslo_vmware.api [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': task-2460147, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2367.929074] env[62507]: DEBUG oslo_vmware.exceptions [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Fault InvalidArgument not matched. 
{{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2367.929074] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2367.929074] env[62507]: ERROR nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2367.929074] env[62507]: Faults: ['InvalidArgument'] [ 2367.929074] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] Traceback (most recent call last): [ 2367.929074] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2367.929074] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] yield resources [ 2367.929074] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2367.929074] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] self.driver.spawn(context, instance, image_meta, [ 2367.929486] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2367.929486] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2367.929486] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2367.929486] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] self._fetch_image_if_missing(context, vi) [ 2367.929486] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2367.929486] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] image_cache(vi, tmp_image_ds_loc) [ 2367.929486] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2367.929486] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] vm_util.copy_virtual_disk( [ 2367.929486] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2367.929486] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] session._wait_for_task(vmdk_copy_task) [ 2367.929486] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2367.929486] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] return self.wait_for_task(task_ref) [ 2367.929486] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2367.929815] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] return evt.wait() [ 2367.929815] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2367.929815] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] result = hub.switch() [ 2367.929815] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2367.929815] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] return self.greenlet.switch() [ 2367.929815] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2367.929815] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] self.f(*self.args, **self.kw) [ 2367.929815] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2367.929815] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] raise exceptions.translate_fault(task_info.error) [ 2367.929815] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2367.929815] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] Faults: ['InvalidArgument'] [ 2367.929815] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] [ 2367.930144] env[62507]: INFO nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Terminating instance [ 2367.930985] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2367.931346] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2367.931714] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91d5e4b1-9338-4780-af18-c4c1cb929481 
{{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.937651] env[62507]: DEBUG nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2367.938037] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2367.938935] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4092e61f-f5ca-42e1-aa9c-37aedcbc38d9 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.951340] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2367.951720] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2367.954322] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-811b2c36-4fcd-487f-b578-888a2a6d404f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.956735] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2367.963620] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cba78e74-9d17-48fc-a6e7-04fa824c8cd1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.970857] env[62507]: DEBUG oslo_vmware.api [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for the task: (returnval){ [ 2367.970857] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529c1839-ee88-f0ca-3cb0-093281c4e0bc" [ 2367.970857] env[62507]: _type = "Task" [ 2367.970857] env[62507]: } to complete. 
{{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2367.986328] env[62507]: DEBUG oslo_vmware.api [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]529c1839-ee88-f0ca-3cb0-093281c4e0bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2368.032498] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2368.033015] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2368.033350] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Deleting the datastore file [datastore2] a76d0987-29c2-423b-972c-990639986d5f {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2368.033749] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf60269d-3906-41d6-ad82-35ae3950ce10 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.043145] env[62507]: DEBUG oslo_vmware.api [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for the task: (returnval){ [ 2368.043145] env[62507]: value = "task-2460149" [ 2368.043145] env[62507]: _type = "Task" [ 2368.043145] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2368.051099] env[62507]: DEBUG oslo_vmware.api [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': task-2460149, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2368.481943] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2368.484922] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Creating directory with path [datastore2] vmware_temp/704ffc85-cede-414b-a34a-e17b2ac2db3a/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2368.484922] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98db4dc3-064d-4e4d-afff-7cbcee078b77 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.497020] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Created directory with path [datastore2] vmware_temp/704ffc85-cede-414b-a34a-e17b2ac2db3a/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2368.497020] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Fetch image to [datastore2] vmware_temp/704ffc85-cede-414b-a34a-e17b2ac2db3a/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2368.497020] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/704ffc85-cede-414b-a34a-e17b2ac2db3a/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2368.497020] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca090ac9-7b68-4daa-9b46-08ccbce17664 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.503418] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7558c4-dafd-4ecb-b335-64e34e04b8db {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.513594] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e269da9d-160b-422f-8fb1-796a59c4d692 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.548136] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cf5816-9cb3-4469-99cb-0b4ea32dd974 
{{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.555085] env[62507]: DEBUG oslo_vmware.api [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Task: {'id': task-2460149, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07954} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2368.556639] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2368.556986] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2368.557346] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2368.557687] env[62507]: INFO nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 2368.559615] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-68f56800-dd3b-4494-a286-83f3e3614ad4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.562093] env[62507]: DEBUG nova.compute.claims [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2368.562093] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2368.562093] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2368.586025] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2368.659946] env[62507]: DEBUG oslo_vmware.rw_handles [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/704ffc85-cede-414b-a34a-e17b2ac2db3a/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2368.725052] env[62507]: DEBUG oslo_vmware.rw_handles [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2368.725609] env[62507]: DEBUG oslo_vmware.rw_handles [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/704ffc85-cede-414b-a34a-e17b2ac2db3a/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2368.800816] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37eb8d50-a0fe-458a-96c8-f0e606f3a916 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.808451] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d906fce-3442-44a3-8256-9ad5119c485f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.837317] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e777543-78bd-4de1-ba0b-f42eff40412a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.843917] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e577104-6df7-4861-81f4-fda202e71c90 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.857473] env[62507]: DEBUG nova.compute.provider_tree [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2368.865912] env[62507]: DEBUG nova.scheduler.client.report [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2368.916633] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.354s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2368.917220] env[62507]: ERROR nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2368.917220] env[62507]: Faults: ['InvalidArgument'] [ 2368.917220] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] Traceback (most recent call last): [ 2368.917220] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2368.917220] 
env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] self.driver.spawn(context, instance, image_meta, [ 2368.917220] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2368.917220] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2368.917220] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2368.917220] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] self._fetch_image_if_missing(context, vi) [ 2368.917220] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2368.917220] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] image_cache(vi, tmp_image_ds_loc) [ 2368.917220] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2368.917538] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] vm_util.copy_virtual_disk( [ 2368.917538] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2368.917538] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] session._wait_for_task(vmdk_copy_task) [ 2368.917538] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2368.917538] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] return self.wait_for_task(task_ref) [ 2368.917538] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2368.917538] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] return evt.wait() [ 2368.917538] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2368.917538] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] result = hub.switch() [ 2368.917538] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2368.917538] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] return self.greenlet.switch() [ 2368.917538] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2368.917538] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] self.f(*self.args, **self.kw) [ 2368.917841] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2368.917841] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] raise exceptions.translate_fault(task_info.error) [ 2368.917841] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2368.917841] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] Faults: ['InvalidArgument'] [ 2368.917841] env[62507]: ERROR nova.compute.manager [instance: a76d0987-29c2-423b-972c-990639986d5f] [ 2368.917961] env[62507]: DEBUG nova.compute.utils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2368.919600] env[62507]: DEBUG nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Build of instance a76d0987-29c2-423b-972c-990639986d5f was re-scheduled: A specified parameter was not correct: fileType [ 2368.919600] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2368.919966] env[62507]: DEBUG nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2368.920156] env[62507]: DEBUG nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2368.920334] env[62507]: DEBUG nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2368.920504] env[62507]: DEBUG nova.network.neutron [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2369.217309] env[62507]: DEBUG nova.network.neutron [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2369.227907] env[62507]: INFO nova.compute.manager [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Took 0.31 seconds to deallocate network for instance. [ 2369.329637] env[62507]: INFO nova.scheduler.client.report [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Deleted allocations for instance a76d0987-29c2-423b-972c-990639986d5f [ 2369.357882] env[62507]: DEBUG oslo_concurrency.lockutils [None req-2aefb887-9bbd-4794-ad63-64bc9c9f61cb tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "a76d0987-29c2-423b-972c-990639986d5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 688.415s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2369.358246] env[62507]: DEBUG oslo_concurrency.lockutils [None req-91642030-79c8-4555-9615-873caac607dd tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "a76d0987-29c2-423b-972c-990639986d5f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 492.478s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2369.358497] env[62507]: DEBUG oslo_concurrency.lockutils [None req-91642030-79c8-4555-9615-873caac607dd tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Acquiring lock "a76d0987-29c2-423b-972c-990639986d5f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2369.358718] env[62507]: DEBUG oslo_concurrency.lockutils [None req-91642030-79c8-4555-9615-873caac607dd tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "a76d0987-29c2-423b-972c-990639986d5f-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2369.358897] env[62507]: DEBUG oslo_concurrency.lockutils [None req-91642030-79c8-4555-9615-873caac607dd tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "a76d0987-29c2-423b-972c-990639986d5f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2369.363687] env[62507]: INFO nova.compute.manager [None req-91642030-79c8-4555-9615-873caac607dd tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Terminating instance [ 2369.365593] env[62507]: DEBUG nova.compute.manager [None req-91642030-79c8-4555-9615-873caac607dd tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2369.365796] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-91642030-79c8-4555-9615-873caac607dd tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2369.366340] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f54521b-79eb-493e-9594-b890fc9798af {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2369.375335] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffef0a2e-e190-4267-ab4f-7a78bc887236 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2369.403909] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-91642030-79c8-4555-9615-873caac607dd tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a76d0987-29c2-423b-972c-990639986d5f could not be found. [ 2369.404126] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-91642030-79c8-4555-9615-873caac607dd tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2369.404312] env[62507]: INFO nova.compute.manager [None req-91642030-79c8-4555-9615-873caac607dd tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] [instance: a76d0987-29c2-423b-972c-990639986d5f] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2369.404559] env[62507]: DEBUG oslo.service.loopingcall [None req-91642030-79c8-4555-9615-873caac607dd tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2369.404789] env[62507]: DEBUG nova.compute.manager [-] [instance: a76d0987-29c2-423b-972c-990639986d5f] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2369.404884] env[62507]: DEBUG nova.network.neutron [-] [instance: a76d0987-29c2-423b-972c-990639986d5f] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2369.429944] env[62507]: DEBUG nova.network.neutron [-] [instance: a76d0987-29c2-423b-972c-990639986d5f] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2369.460018] env[62507]: INFO nova.compute.manager [-] [instance: a76d0987-29c2-423b-972c-990639986d5f] Took 0.05 seconds to deallocate network for instance. [ 2369.547336] env[62507]: DEBUG oslo_concurrency.lockutils [None req-91642030-79c8-4555-9615-873caac607dd tempest-AttachVolumeNegativeTest-590541527 tempest-AttachVolumeNegativeTest-590541527-project-member] Lock "a76d0987-29c2-423b-972c-990639986d5f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.189s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2369.548653] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "a76d0987-29c2-423b-972c-990639986d5f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 14.175s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2369.548653] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: a76d0987-29c2-423b-972c-990639986d5f] During sync_power_state the instance has a pending task (deleting). Skip. 
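The _sync_power_states entry above backs off because the instance still carries a pending task ('deleting'): reconciling the DB power state while another operation owns the instance would race with that operation. A minimal sketch of the guard, assuming a hypothetical instance dict with 'task_state' and 'power_state' keys and a query_backend_state callable; this illustrates the skip logic the log shows, not the manager's exact code.

def sync_power_state(instance, query_backend_state, log=print):
    """Skip the power-state sync while another task owns the instance."""
    if instance.get('task_state') is not None:
        # As in the log entry above: a pending task means the record is
        # about to change, so syncing now could clobber the in-flight op.
        log(f"During sync_power_state the instance has a pending task "
            f"({instance['task_state']}). Skip.")
        return
    backend = query_backend_state(instance['uuid'])
    if backend != instance['power_state']:
        instance['power_state'] = backend  # reconcile DB with hypervisor

# Toy usage: the pending 'deleting' task short-circuits the sync.
sync_power_state({'uuid': 'a76d0987', 'task_state': 'deleting',
                  'power_state': 1}, lambda uuid: 4)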
[ 2369.548653] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "a76d0987-29c2-423b-972c-990639986d5f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2373.291653] env[62507]: DEBUG oslo_concurrency.lockutils [None req-30fe2b78-ddef-4397-ab0e-eae7a43a33e5 tempest-ServersTestJSON-774017569 tempest-ServersTestJSON-774017569-project-member] Acquiring lock "d27a54c7-96d0-467c-8cb2-8b23de43c107" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2373.306011] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "9f75f6ca-35a2-44d8-8b2a-ecf14b236421" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2373.306336] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "9f75f6ca-35a2-44d8-8b2a-ecf14b236421" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2373.315045] env[62507]: DEBUG nova.compute.manager [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Starting instance... 
{{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2373.389828] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2373.390102] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2373.391626] env[62507]: INFO nova.compute.claims [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2373.558037] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674454c9-ac0b-44e0-bf7d-504b259c5c00 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.565699] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6673d2b5-3f1d-4b18-ba44-f07b0617aec0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.596496] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d00259-24f8-413b-ba2a-613eb42d7d57 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.603249] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95019be7-080e-445f-958a-8d884581f3d0 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.617042] env[62507]: DEBUG nova.compute.provider_tree [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2373.625671] env[62507]: DEBUG nova.scheduler.client.report [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2373.638736] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 
tempest-ServersTestJSON-398374741-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.249s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2373.639252] env[62507]: DEBUG nova.compute.manager [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Start building networks asynchronously for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2373.672462] env[62507]: DEBUG nova.compute.utils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Using /dev/sd instead of None {{(pid=62507) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2373.673831] env[62507]: DEBUG nova.compute.manager [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Allocating IP information in the background. {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2373.674014] env[62507]: DEBUG nova.network.neutron [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] allocate_for_instance() {{(pid=62507) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2373.682609] env[62507]: DEBUG nova.compute.manager [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Start building block device mappings for instance. {{(pid=62507) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2373.743675] env[62507]: DEBUG nova.policy [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8187d3d405c244f995763c4d67515b6a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c850b58d9b554e81b09f26703a6f50f1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62507) authorize /opt/stack/nova/nova/policy.py:203}} [ 2373.753077] env[62507]: DEBUG nova.compute.manager [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Start spawning the instance on the hypervisor. 
{{(pid=62507) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2373.778727] env[62507]: DEBUG nova.virt.hardware [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-12T01:14:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-12T01:14:38Z,direct_url=,disk_format='vmdk',id=601dc712-1d53-404c-b128-df5971f300a1,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6379394ace14a7e97a0396f7e1277db',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-12T01:14:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2373.779062] env[62507]: DEBUG nova.virt.hardware [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Flavor limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2373.779233] env[62507]: DEBUG nova.virt.hardware [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Image limits 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2373.779300] env[62507]: DEBUG nova.virt.hardware [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Flavor pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2373.779483] env[62507]: DEBUG nova.virt.hardware [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Image pref 0:0:0 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2373.779640] env[62507]: DEBUG nova.virt.hardware [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62507) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2373.779850] env[62507]: DEBUG nova.virt.hardware [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2373.780150] env[62507]: DEBUG nova.virt.hardware [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2373.780563] env[62507]: DEBUG nova.virt.hardware [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 
tempest-ServersTestJSON-398374741-project-member] Got 1 possible topologies {{(pid=62507) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2373.780563] env[62507]: DEBUG nova.virt.hardware [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2373.780729] env[62507]: DEBUG nova.virt.hardware [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62507) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2373.781579] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb6dae4-746e-46bf-a580-e7fb3984882e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.789489] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b060ab40-6123-442a-88ab-abfb4522013d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.042465] env[62507]: DEBUG nova.network.neutron [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Successfully created port: a1e21fb5-c102-4793-9c9d-b2b635d59068 {{(pid=62507) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2374.656728] env[62507]: DEBUG nova.compute.manager [req-2a81bf16-9cc0-46fa-8324-9e360c00c68d req-c3da51ab-5715-4c7a-865d-6623be43606e service nova] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Received event network-vif-plugged-a1e21fb5-c102-4793-9c9d-b2b635d59068 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2374.657037] env[62507]: DEBUG oslo_concurrency.lockutils [req-2a81bf16-9cc0-46fa-8324-9e360c00c68d req-c3da51ab-5715-4c7a-865d-6623be43606e service nova] Acquiring lock "9f75f6ca-35a2-44d8-8b2a-ecf14b236421-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2374.657191] env[62507]: DEBUG oslo_concurrency.lockutils [req-2a81bf16-9cc0-46fa-8324-9e360c00c68d req-c3da51ab-5715-4c7a-865d-6623be43606e service nova] Lock "9f75f6ca-35a2-44d8-8b2a-ecf14b236421-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2374.657363] env[62507]: DEBUG oslo_concurrency.lockutils [req-2a81bf16-9cc0-46fa-8324-9e360c00c68d req-c3da51ab-5715-4c7a-865d-6623be43606e service nova] Lock "9f75f6ca-35a2-44d8-8b2a-ecf14b236421-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2374.657566] env[62507]: DEBUG nova.compute.manager [req-2a81bf16-9cc0-46fa-8324-9e360c00c68d req-c3da51ab-5715-4c7a-865d-6623be43606e service nova] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] No 
waiting events found dispatching network-vif-plugged-a1e21fb5-c102-4793-9c9d-b2b635d59068 {{(pid=62507) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2374.657754] env[62507]: WARNING nova.compute.manager [req-2a81bf16-9cc0-46fa-8324-9e360c00c68d req-c3da51ab-5715-4c7a-865d-6623be43606e service nova] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Received unexpected event network-vif-plugged-a1e21fb5-c102-4793-9c9d-b2b635d59068 for instance with vm_state building and task_state spawning. [ 2374.736752] env[62507]: DEBUG nova.network.neutron [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Successfully updated port: a1e21fb5-c102-4793-9c9d-b2b635d59068 {{(pid=62507) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2374.748166] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "refresh_cache-9f75f6ca-35a2-44d8-8b2a-ecf14b236421" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2374.748413] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired lock "refresh_cache-9f75f6ca-35a2-44d8-8b2a-ecf14b236421" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2374.748812] env[62507]: DEBUG nova.network.neutron [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Building network info cache for instance {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2374.790295] env[62507]: DEBUG nova.network.neutron [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Instance cache missing network info. 
{{(pid=62507) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2374.946678] env[62507]: DEBUG nova.network.neutron [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Updating instance_info_cache with network_info: [{"id": "a1e21fb5-c102-4793-9c9d-b2b635d59068", "address": "fa:16:3e:5e:03:46", "network": {"id": "a17cad1d-200f-41fe-b1b3-5a098d4c4317", "bridge": "br-int", "label": "tempest-ServersTestJSON-1356127193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c850b58d9b554e81b09f26703a6f50f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1e21fb5-c1", "ovs_interfaceid": "a1e21fb5-c102-4793-9c9d-b2b635d59068", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2374.959166] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Releasing lock "refresh_cache-9f75f6ca-35a2-44d8-8b2a-ecf14b236421" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2374.959575] env[62507]: DEBUG nova.compute.manager [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Instance network_info: |[{"id": "a1e21fb5-c102-4793-9c9d-b2b635d59068", "address": "fa:16:3e:5e:03:46", "network": {"id": "a17cad1d-200f-41fe-b1b3-5a098d4c4317", "bridge": "br-int", "label": "tempest-ServersTestJSON-1356127193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c850b58d9b554e81b09f26703a6f50f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1e21fb5-c1", "ovs_interfaceid": "a1e21fb5-c102-4793-9c9d-b2b635d59068", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62507) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2374.959871] env[62507]: DEBUG 
nova.virt.vmwareapi.vmops [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:03:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '489b2441-7132-4942-8b61-49cf0ad4400e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a1e21fb5-c102-4793-9c9d-b2b635d59068', 'vif_model': 'vmxnet3'}] {{(pid=62507) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2374.968380] env[62507]: DEBUG oslo.service.loopingcall [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2374.968843] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Creating VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2374.969140] env[62507]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae0cd680-6d7a-463c-84e5-0c84dd5f226a {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.989844] env[62507]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2374.989844] env[62507]: value = "task-2460150" [ 2374.989844] env[62507]: _type = "Task" [ 2374.989844] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.998280] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460150, 'name': CreateVM_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2375.500201] env[62507]: DEBUG oslo_vmware.api [-] Task: {'id': task-2460150, 'name': CreateVM_Task, 'duration_secs': 0.278701} completed successfully. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2375.500461] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Created VM on the ESX host {{(pid=62507) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2375.501127] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2375.501300] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2375.501631] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2375.501887] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f75f795-d9f0-4c0a-af1f-63bf04195da6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.506289] env[62507]: DEBUG oslo_vmware.api [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Waiting for the task: (returnval){ [ 2375.506289] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e81b72-cae0-0bf5-3344-7b7ab1be1dbc" [ 2375.506289] env[62507]: _type = "Task" [ 2375.506289] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2375.515267] env[62507]: DEBUG oslo_vmware.api [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]52e81b72-cae0-0bf5-3344-7b7ab1be1dbc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2376.016808] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2376.017281] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Processing image 601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2376.017359] env[62507]: DEBUG oslo_concurrency.lockutils [None req-dba5cae1-305b-4eee-b769-bf92102f8712 tempest-ServersTestJSON-398374741 tempest-ServersTestJSON-398374741-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2376.735696] env[62507]: DEBUG nova.compute.manager [req-5670950c-10a1-4713-8f8f-c7c78663eb58 req-46cd3b02-0492-4e1f-8e37-86540c3a0820 service nova] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Received event network-changed-a1e21fb5-c102-4793-9c9d-b2b635d59068 {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2376.735808] env[62507]: DEBUG nova.compute.manager [req-5670950c-10a1-4713-8f8f-c7c78663eb58 req-46cd3b02-0492-4e1f-8e37-86540c3a0820 service nova] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Refreshing instance network info cache due to event network-changed-a1e21fb5-c102-4793-9c9d-b2b635d59068. {{(pid=62507) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2376.736030] env[62507]: DEBUG oslo_concurrency.lockutils [req-5670950c-10a1-4713-8f8f-c7c78663eb58 req-46cd3b02-0492-4e1f-8e37-86540c3a0820 service nova] Acquiring lock "refresh_cache-9f75f6ca-35a2-44d8-8b2a-ecf14b236421" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2376.736191] env[62507]: DEBUG oslo_concurrency.lockutils [req-5670950c-10a1-4713-8f8f-c7c78663eb58 req-46cd3b02-0492-4e1f-8e37-86540c3a0820 service nova] Acquired lock "refresh_cache-9f75f6ca-35a2-44d8-8b2a-ecf14b236421" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2376.736359] env[62507]: DEBUG nova.network.neutron [req-5670950c-10a1-4713-8f8f-c7c78663eb58 req-46cd3b02-0492-4e1f-8e37-86540c3a0820 service nova] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Refreshing network info cache for port a1e21fb5-c102-4793-9c9d-b2b635d59068 {{(pid=62507) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2376.970583] env[62507]: DEBUG nova.network.neutron [req-5670950c-10a1-4713-8f8f-c7c78663eb58 req-46cd3b02-0492-4e1f-8e37-86540c3a0820 service nova] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Updated VIF entry in instance network info cache for port a1e21fb5-c102-4793-9c9d-b2b635d59068. 
{{(pid=62507) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2376.970940] env[62507]: DEBUG nova.network.neutron [req-5670950c-10a1-4713-8f8f-c7c78663eb58 req-46cd3b02-0492-4e1f-8e37-86540c3a0820 service nova] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Updating instance_info_cache with network_info: [{"id": "a1e21fb5-c102-4793-9c9d-b2b635d59068", "address": "fa:16:3e:5e:03:46", "network": {"id": "a17cad1d-200f-41fe-b1b3-5a098d4c4317", "bridge": "br-int", "label": "tempest-ServersTestJSON-1356127193-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c850b58d9b554e81b09f26703a6f50f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "489b2441-7132-4942-8b61-49cf0ad4400e", "external-id": "nsx-vlan-transportzone-971", "segmentation_id": 971, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1e21fb5-c1", "ovs_interfaceid": "a1e21fb5-c102-4793-9c9d-b2b635d59068", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2376.980109] env[62507]: DEBUG oslo_concurrency.lockutils [req-5670950c-10a1-4713-8f8f-c7c78663eb58 req-46cd3b02-0492-4e1f-8e37-86540c3a0820 service nova] Releasing lock "refresh_cache-9f75f6ca-35a2-44d8-8b2a-ecf14b236421" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2383.220987] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2383.221341] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2383.221341] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2383.243806] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2383.243962] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2383.244107] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2383.244245] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2383.244374] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2383.244497] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2383.244619] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 15b28973-c067-45be-ad64-b9315f033824] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2383.244738] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2383.244856] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2383.244977] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2384.167314] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2386.168135] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2386.168632] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2386.168632] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62507) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2387.164283] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2389.166932] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2389.167301] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2389.167356] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2389.179212] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2389.179407] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2389.179573] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2389.179728] env[62507]: DEBUG nova.compute.resource_tracker [None 
req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62507) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2389.180840] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d70771-f9e5-4ffc-ba5e-dbda71ac3236 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.189558] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a33ce5-8cb0-49ad-8795-1c78fcf28dd4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.203368] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b3d917-8bff-4823-8223-3bb617e1812b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.209607] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e898333-9a76-400a-9808-4c8d6a802567 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.238889] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181168MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62507) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2389.239046] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2389.239231] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2389.311251] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 56c176ec-c6e5-4f48-a5be-badef25c5667 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2389.311417] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 425b5171-97c2-4700-ad5f-c79aadb39eae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2389.311549] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 479a1e78-23c0-4a96-aa72-aa419c8c251b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2389.311674] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 7adef000-4700-4c2f-a7ea-09baf40cedf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2389.311795] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 33af3273-6d4b-435d-8c40-cdbac591a84f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2389.311928] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance d27a54c7-96d0-467c-8cb2-8b23de43c107 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2389.312056] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 15b28973-c067-45be-ad64-b9315f033824 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2389.312175] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance b609718a-b93b-4398-9ffd-1a4dca9f0753 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2389.312290] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Instance 9f75f6ca-35a2-44d8-8b2a-ecf14b236421 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62507) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2389.312487] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2389.312649] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62507) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2389.413531] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56c6b76-d9ca-414e-a302-26a342447ca5 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.421415] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8472100b-eb98-4252-9184-042655c3ffcc {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.450378] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3141e9-8d6a-4078-b9d9-4ac55ab39528 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.457009] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1336ae7c-f051-4b46-a40a-c53033b76ff8 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.469381] env[62507]: DEBUG nova.compute.provider_tree [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2389.477010] env[62507]: DEBUG nova.scheduler.client.report [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2389.489936] env[62507]: DEBUG nova.compute.resource_tracker [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62507) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2389.490118] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.251s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2392.491474] env[62507]: DEBUG oslo_service.periodic_task [None 
req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2416.085336] env[62507]: WARNING oslo_vmware.rw_handles [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2416.085336] env[62507]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2416.085336] env[62507]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2416.085336] env[62507]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2416.085336] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2416.085336] env[62507]: ERROR oslo_vmware.rw_handles response.begin() [ 2416.085336] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2416.085336] env[62507]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2416.085336] env[62507]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2416.085336] env[62507]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2416.085336] env[62507]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2416.085336] env[62507]: ERROR oslo_vmware.rw_handles [ 2416.086027] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Downloaded image file data 601dc712-1d53-404c-b128-df5971f300a1 to vmware_temp/704ffc85-cede-414b-a34a-e17b2ac2db3a/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2416.087852] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Caching image {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2416.088180] env[62507]: DEBUG nova.virt.vmwareapi.vm_util [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Copying Virtual Disk [datastore2] vmware_temp/704ffc85-cede-414b-a34a-e17b2ac2db3a/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk to [datastore2] vmware_temp/704ffc85-cede-414b-a34a-e17b2ac2db3a/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk {{(pid=62507) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2416.088514] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37fed533-ed9b-45b2-b9b1-e56088b98ea2 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.097416] env[62507]: DEBUG oslo_vmware.api [None 
req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for the task: (returnval){ [ 2416.097416] env[62507]: value = "task-2460151" [ 2416.097416] env[62507]: _type = "Task" [ 2416.097416] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2416.105539] env[62507]: DEBUG oslo_vmware.api [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': task-2460151, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2416.607446] env[62507]: DEBUG oslo_vmware.exceptions [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Fault InvalidArgument not matched. {{(pid=62507) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2416.607687] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Releasing lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2416.608270] env[62507]: ERROR nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2416.608270] env[62507]: Faults: ['InvalidArgument'] [ 2416.608270] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Traceback (most recent call last): [ 2416.608270] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2416.608270] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] yield resources [ 2416.608270] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2416.608270] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] self.driver.spawn(context, instance, image_meta, [ 2416.608270] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2416.608270] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2416.608270] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2416.608270] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] self._fetch_image_if_missing(context, vi) [ 2416.608270] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2416.608606] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] image_cache(vi, tmp_image_ds_loc) [ 2416.608606] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2416.608606] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] vm_util.copy_virtual_disk( [ 2416.608606] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2416.608606] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] session._wait_for_task(vmdk_copy_task) [ 2416.608606] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2416.608606] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] return self.wait_for_task(task_ref) [ 2416.608606] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2416.608606] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] return evt.wait() [ 2416.608606] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2416.608606] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] result = hub.switch() [ 2416.608606] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2416.608606] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] return self.greenlet.switch() [ 2416.608976] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2416.608976] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] self.f(*self.args, **self.kw) [ 2416.608976] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2416.608976] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] raise exceptions.translate_fault(task_info.error) [ 2416.608976] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2416.608976] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Faults: ['InvalidArgument'] [ 2416.608976] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] [ 2416.608976] env[62507]: INFO nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 
56c176ec-c6e5-4f48-a5be-badef25c5667] Terminating instance [ 2416.610273] env[62507]: DEBUG oslo_concurrency.lockutils [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/601dc712-1d53-404c-b128-df5971f300a1/601dc712-1d53-404c-b128-df5971f300a1.vmdk" {{(pid=62507) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2416.610481] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2416.610721] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e27642e-43c3-4800-97e6-a0d5fd234960 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.613054] env[62507]: DEBUG nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2416.613250] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2416.613950] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7b136a-cc0b-4ec5-9722-69028e6ffcd4 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.620729] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Unregistering the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2416.620935] env[62507]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86a9a81d-c3c2-4d91-bd8a-ac5ad5b8eb59 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.622941] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2416.623147] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62507) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2416.624098] env[62507]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d015e51-fcb8-49f2-94a7-43ce6032bff1 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.628956] env[62507]: DEBUG oslo_vmware.api [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Waiting for the task: (returnval){ [ 2416.628956] env[62507]: value = "session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]520ebb04-42ce-2f79-3053-05e4592e270f" [ 2416.628956] env[62507]: _type = "Task" [ 2416.628956] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2416.637124] env[62507]: DEBUG oslo_vmware.api [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Task: {'id': session[5227f57b-bd65-8ecc-7f9f-f48f44af6ba0]520ebb04-42ce-2f79-3053-05e4592e270f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2416.687127] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Unregistered the VM {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2416.687592] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Deleting contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2416.687831] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Deleting the datastore file [datastore2] 56c176ec-c6e5-4f48-a5be-badef25c5667 {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2416.688126] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c604365c-0407-4015-b8ad-06b1fcf1b02e {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.694464] env[62507]: DEBUG oslo_vmware.api [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for the task: (returnval){ [ 2416.694464] env[62507]: value = "task-2460153" [ 2416.694464] env[62507]: _type = "Task" [ 2416.694464] env[62507]: } to complete. {{(pid=62507) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2416.702310] env[62507]: DEBUG oslo_vmware.api [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': task-2460153, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2417.139592] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Preparing fetch location {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2417.139988] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Creating directory with path [datastore2] vmware_temp/3e5e4316-301b-4ac7-9bd8-0f9e016d8eb8/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2417.140182] env[62507]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f93cc20f-0868-4c2b-8523-1fd00dd20329 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.151382] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Created directory with path [datastore2] vmware_temp/3e5e4316-301b-4ac7-9bd8-0f9e016d8eb8/601dc712-1d53-404c-b128-df5971f300a1 {{(pid=62507) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2417.151585] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Fetch image to [datastore2] vmware_temp/3e5e4316-301b-4ac7-9bd8-0f9e016d8eb8/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk {{(pid=62507) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2417.151752] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to [datastore2] vmware_temp/3e5e4316-301b-4ac7-9bd8-0f9e016d8eb8/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk on the data store datastore2 {{(pid=62507) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2417.152505] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3597c57c-642f-4781-94c4-644d6e8dbb3b {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.159221] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f753de15-77fc-4a4f-b483-a99ed205a82f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.168165] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e92f59-59de-4aa0-af5d-d368f72186c6 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.202702] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd71531-306f-4ab1-8016-5a865df5b215 {{(pid=62507) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.209526] env[62507]: DEBUG oslo_vmware.api [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Task: {'id': task-2460153, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077243} completed successfully. {{(pid=62507) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2417.210967] env[62507]: DEBUG nova.virt.vmwareapi.ds_util [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Deleted the datastore file {{(pid=62507) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2417.211202] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Deleted contents of the VM from datastore datastore2 {{(pid=62507) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2417.211383] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2417.211560] env[62507]: INFO nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Took 0.60 seconds to destroy the instance on the hypervisor. 
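
Editor's note: the spawn failure above is raised out of oslo.vmware's task-polling loop: Nova starts a CopyVirtualDisk_Task, `wait_for_task` polls it, and when the task ends in an error state the fault is translated into a `VimFaultException` ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']), which aborts `_fetch_image_if_missing` and triggers the teardown seen next. The sketch below is a minimal, self-contained reconstruction of that poll-and-translate pattern only; it is not Nova or oslo.vmware source, and every name in it (`poll_task`, `fetch_task_info`, the simulated task states) is a hypothetical stand-in for the internals cited in the traceback (`oslo_vmware/api.py:_poll_task`, `oslo_vmware.exceptions.VimFaultException`).

```python
# Illustrative sketch, assuming the flow shown in the traceback above.
# All names are hypothetical stand-ins, not the real oslo.vmware API.

class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

def poll_task(fetch_task_info):
    """Poll a task until it leaves the 'running' state, then translate an
    error result into an exception (cf. oslo_vmware/api.py _poll_task)."""
    while True:
        info = fetch_task_info()
        if info["state"] == "running":
            continue  # the real loop sleeps between polls
        if info["state"] == "success":
            return info.get("result")
        # state == "error": this is what produced the log's
        # "A specified parameter was not correct: fileType" failure
        raise VimFaultException(info["faults"], info["error"])

# Simulated task that fails the way task-2460151 did in the log above.
states = iter([
    {"state": "running"},
    {"state": "error",
     "error": "A specified parameter was not correct: fileType",
     "faults": ["InvalidArgument"]},
])

try:
    poll_task(lambda: next(states))
except VimFaultException as exc:
    print(f"Instance failed to spawn: {exc} Faults: {exc.fault_list}")
```

The log then shows the consequence of that exception propagating: the claim is aborted, the build is re-scheduled, and the instance is destroyed and its network deallocated.
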
[ 2417.213618] env[62507]: DEBUG nova.compute.claims [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Aborting claim: {{(pid=62507) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2417.213785] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2417.214088] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2417.216835] env[62507]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ee3d463b-1958-4ba1-99ee-68791cce315d {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.239295] env[62507]: DEBUG nova.virt.vmwareapi.images [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Downloading image file data 601dc712-1d53-404c-b128-df5971f300a1 to the data store datastore2 {{(pid=62507) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2417.398016] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2422158d-92b5-4aaa-9031-57f9f3ea2b0f {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.405145] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f8bd79-8020-4eca-9453-f6e2f1da8157 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.410150] env[62507]: DEBUG oslo_vmware.rw_handles [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3e5e4316-301b-4ac7-9bd8-0f9e016d8eb8/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62507) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2417.436828] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6593c91-a6e3-41b5-818c-124cb589d521 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.499372] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee9c68a-9d1c-4ec6-bc85-65fcf5f40216 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.504812] env[62507]: DEBUG oslo_vmware.rw_handles [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Completed reading data from the image iterator. {{(pid=62507) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2417.504812] env[62507]: DEBUG oslo_vmware.rw_handles [None req-417aec43-693a-4316-992d-f0a77d7c3293 tempest-ImagesTestJSON-1935977095 tempest-ImagesTestJSON-1935977095-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3e5e4316-301b-4ac7-9bd8-0f9e016d8eb8/601dc712-1d53-404c-b128-df5971f300a1/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62507) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2417.514196] env[62507]: DEBUG nova.compute.provider_tree [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed in ProviderTree for provider: 40e67440-0925-46e5-9b58-6e63187cdfab {{(pid=62507) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2417.524218] env[62507]: DEBUG nova.scheduler.client.report [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Inventory has not changed for provider 40e67440-0925-46e5-9b58-6e63187cdfab based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62507) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2417.541148] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.327s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2417.541699] env[62507]: ERROR nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2417.541699] env[62507]: Faults: ['InvalidArgument'] [ 2417.541699] env[62507]: ERROR 
nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Traceback (most recent call last): [ 2417.541699] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2417.541699] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] self.driver.spawn(context, instance, image_meta, [ 2417.541699] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2417.541699] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2417.541699] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2417.541699] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] self._fetch_image_if_missing(context, vi) [ 2417.541699] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2417.541699] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] image_cache(vi, tmp_image_ds_loc) [ 2417.541699] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2417.543203] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] vm_util.copy_virtual_disk( [ 2417.543203] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2417.543203] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] session._wait_for_task(vmdk_copy_task) [ 2417.543203] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2417.543203] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] return self.wait_for_task(task_ref) [ 2417.543203] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2417.543203] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] return evt.wait() [ 2417.543203] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2417.543203] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] result = hub.switch() [ 2417.543203] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2417.543203] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] return self.greenlet.switch() [ 2417.543203] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2417.543203] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] self.f(*self.args, **self.kw) [ 2417.543522] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2417.543522] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] raise exceptions.translate_fault(task_info.error) [ 2417.543522] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2417.543522] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Faults: ['InvalidArgument'] [ 2417.543522] env[62507]: ERROR nova.compute.manager [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] [ 2417.543522] env[62507]: DEBUG nova.compute.utils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] VimFaultException {{(pid=62507) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2417.547823] env[62507]: DEBUG nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Build of instance 56c176ec-c6e5-4f48-a5be-badef25c5667 was re-scheduled: A specified parameter was not correct: fileType [ 2417.547823] env[62507]: Faults: ['InvalidArgument'] {{(pid=62507) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2417.548234] env[62507]: DEBUG nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Unplugging VIFs for instance {{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2417.548406] env[62507]: DEBUG nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62507) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2417.548574] env[62507]: DEBUG nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2417.548735] env[62507]: DEBUG nova.network.neutron [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2417.935588] env[62507]: DEBUG nova.network.neutron [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2417.950411] env[62507]: INFO nova.compute.manager [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Took 0.40 seconds to deallocate network for instance. [ 2418.052745] env[62507]: INFO nova.scheduler.client.report [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Deleted allocations for instance 56c176ec-c6e5-4f48-a5be-badef25c5667 [ 2418.076133] env[62507]: DEBUG oslo_concurrency.lockutils [None req-8874039c-eeab-4269-9929-e4e48fab0587 tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "56c176ec-c6e5-4f48-a5be-badef25c5667" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 666.141s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2418.076436] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ee1b77f-d2e9-451a-a572-c0813a5edeac tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "56c176ec-c6e5-4f48-a5be-badef25c5667" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 469.656s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2418.076674] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ee1b77f-d2e9-451a-a572-c0813a5edeac tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Acquiring lock "56c176ec-c6e5-4f48-a5be-badef25c5667-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2418.076891] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ee1b77f-d2e9-451a-a572-c0813a5edeac tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "56c176ec-c6e5-4f48-a5be-badef25c5667-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2418.077107] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ee1b77f-d2e9-451a-a572-c0813a5edeac tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "56c176ec-c6e5-4f48-a5be-badef25c5667-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2418.079335] env[62507]: INFO nova.compute.manager [None req-7ee1b77f-d2e9-451a-a572-c0813a5edeac tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Terminating instance [ 2418.081041] env[62507]: DEBUG nova.compute.manager [None req-7ee1b77f-d2e9-451a-a572-c0813a5edeac tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Start destroying the instance on the hypervisor. {{(pid=62507) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2418.081274] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7ee1b77f-d2e9-451a-a572-c0813a5edeac tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Destroying instance {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2418.081914] env[62507]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9041fa31-0698-4aad-864e-ad3c4dca2172 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.090836] env[62507]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b5db68-0733-4d19-81df-2d9338bd3108 {{(pid=62507) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.119407] env[62507]: WARNING nova.virt.vmwareapi.vmops [None req-7ee1b77f-d2e9-451a-a572-c0813a5edeac tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 56c176ec-c6e5-4f48-a5be-badef25c5667 could not be found. [ 2418.119603] env[62507]: DEBUG nova.virt.vmwareapi.vmops [None req-7ee1b77f-d2e9-451a-a572-c0813a5edeac tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Instance destroyed {{(pid=62507) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2418.119787] env[62507]: INFO nova.compute.manager [None req-7ee1b77f-d2e9-451a-a572-c0813a5edeac tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2418.120071] env[62507]: DEBUG oslo.service.loopingcall [None req-7ee1b77f-d2e9-451a-a572-c0813a5edeac tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62507) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2418.120315] env[62507]: DEBUG nova.compute.manager [-] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Deallocating network for instance {{(pid=62507) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2418.120411] env[62507]: DEBUG nova.network.neutron [-] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] deallocate_for_instance() {{(pid=62507) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2418.143924] env[62507]: DEBUG nova.network.neutron [-] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Updating instance_info_cache with network_info: [] {{(pid=62507) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2418.152803] env[62507]: INFO nova.compute.manager [-] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] Took 0.03 seconds to deallocate network for instance. [ 2418.239354] env[62507]: DEBUG oslo_concurrency.lockutils [None req-7ee1b77f-d2e9-451a-a572-c0813a5edeac tempest-DeleteServersTestJSON-2119130755 tempest-DeleteServersTestJSON-2119130755-project-member] Lock "56c176ec-c6e5-4f48-a5be-badef25c5667" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.163s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2418.240835] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "56c176ec-c6e5-4f48-a5be-badef25c5667" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 62.867s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2418.241055] env[62507]: INFO nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 56c176ec-c6e5-4f48-a5be-badef25c5667] During sync_power_state the instance has a pending task (deleting). Skip. [ 2418.241255] env[62507]: DEBUG oslo_concurrency.lockutils [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Lock "56c176ec-c6e5-4f48-a5be-badef25c5667" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62507) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2444.167654] env[62507]: DEBUG oslo_service.periodic_task [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62507) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2444.167983] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Starting heal instance info cache {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2444.167983] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Rebuilding the list of instances to heal {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2444.187129] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 425b5171-97c2-4700-ad5f-c79aadb39eae] Skipping network cache update for instance because it is Building. 
{{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2444.187375] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 479a1e78-23c0-4a96-aa72-aa419c8c251b] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2444.187525] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 7adef000-4700-4c2f-a7ea-09baf40cedf5] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2444.187656] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 33af3273-6d4b-435d-8c40-cdbac591a84f] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2444.187782] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: d27a54c7-96d0-467c-8cb2-8b23de43c107] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2444.187905] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 15b28973-c067-45be-ad64-b9315f033824] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2444.188208] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: b609718a-b93b-4398-9ffd-1a4dca9f0753] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2444.188370] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] [instance: 9f75f6ca-35a2-44d8-8b2a-ecf14b236421] Skipping network cache update for instance because it is Building. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2444.188502] env[62507]: DEBUG nova.compute.manager [None req-c0f03a4e-4b82-479a-b27c-067920e53a22 None None] Didn't find any instances for network info cache update. {{(pid=62507) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
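
Editor's note: the closing records repeat the periodic `_heal_instance_info_cache` pass, which skips every instance still in the Building state and ends with "Didn't find any instances for network info cache update." The snippet below is a minimal sketch of that filtering step under stated assumptions: the `Instance` dataclass and `instances_to_heal` helper are hypothetical illustrations, not Nova's actual implementation (which lives in nova/compute/manager.py `_heal_instance_info_cache`).

```python
# Minimal sketch, assuming a simplified instance model; hypothetical names.
from dataclasses import dataclass

@dataclass
class Instance:
    uuid: str
    vm_state: str  # e.g. 'building', 'active'

def instances_to_heal(instances):
    """Keep only instances whose network info cache is worth refreshing,
    mirroring the 'Skipping ... because it is Building' records above."""
    to_heal = []
    for inst in instances:
        if inst.vm_state == "building":
            print(f"[instance: {inst.uuid}] Skipping network cache update "
                  "for instance because it is Building.")
            continue
        to_heal.append(inst)
    if not to_heal:
        print("Didn't find any instances for network info cache update.")
    return to_heal

# With every instance still building, the pass ends exactly as logged.
instances_to_heal([
    Instance("425b5171-97c2-4700-ad5f-c79aadb39eae", "building"),
    Instance("9f75f6ca-35a2-44d8-8b2a-ecf14b236421", "building"),
])
```
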